From d118477d8a06fe588d7b0668e18464e2c856bbb1 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 31 Jul 2024 01:13:31 -0700 Subject: [PATCH 01/61] add dynamictable, vectordata, vectorindex mixins --- docs/index.md | 1 + docs/intro/translation.md | 5 + docs/meta/references.md | 11 + .../src/nwb_linkml/generators/pydantic.py | 37 ++- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 243 ++++++++++++++++-- nwb_linkml/src/nwb_linkml/io/schema.py | 10 +- nwb_linkml/src/nwb_linkml/providers/linkml.py | 4 +- nwb_linkml/tests/fixtures.py | 23 ++ nwb_linkml/tests/test_includes/test_hdmf.py | 14 +- scripts/generate_core.py | 53 ++-- 10 files changed, 361 insertions(+), 40 deletions(-) create mode 100644 docs/meta/references.md diff --git a/docs/index.md b/docs/index.md index 9ffc483..32ed37e 100644 --- a/docs/index.md +++ b/docs/index.md @@ -284,6 +284,7 @@ api/nwb_linkml/schema/index meta/todo meta/changelog +meta/references genindex ``` diff --git a/docs/intro/translation.md b/docs/intro/translation.md index d5c078a..6170fee 100644 --- a/docs/intro/translation.md +++ b/docs/intro/translation.md @@ -20,6 +20,11 @@ ### DynamicTable +```{note} +See the [DynamicTable](https://hdmf-common-schema.readthedocs.io/en/stable/format_description.html#dynamictable) +reference docs +``` + One of the major special cases in NWB is the use of `DynamicTable` to contain tabular data that contains columns that are not in the base spec. diff --git a/docs/meta/references.md b/docs/meta/references.md new file mode 100644 index 0000000..dd36a1a --- /dev/null +++ b/docs/meta/references.md @@ -0,0 +1,11 @@ +# References + +## Documentation + +- [hdmf](https://hdmf.readthedocs.io/en/stable/) +- [hdmf-common-schema](https://hdmf-common-schema.readthedocs.io/en/stable/) +- [pynwb](https://pynwb.readthedocs.io/en/latest/) + +```{todo} +Add the bibtex refs to NWB papers :) +``` \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py index b42c83a..3ecf605 100644 --- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py +++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py @@ -40,7 +40,7 @@ from types import ModuleType from typing import ClassVar, Dict, List, Optional, Tuple, Type, Union from linkml.generators import PydanticGenerator -from linkml.generators.pydanticgen.build import SlotResult +from linkml.generators.pydanticgen.build import SlotResult, ClassResult from linkml.generators.pydanticgen.array import ArrayRepresentation, NumpydanticArray from linkml.generators.pydanticgen.template import PydanticModule, Import, Imports from linkml_runtime.linkml_model.meta import ( @@ -63,6 +63,7 @@ from pydantic import BaseModel from nwb_linkml.maps import flat_to_nptyping from nwb_linkml.maps.naming import module_case, version_module_case from nwb_linkml.includes.types import ModelTypeString, _get_name, NamedString, NamedImports +from nwb_linkml.includes.hdmf import DYNAMIC_TABLE_IMPORTS, DYNAMIC_TABLE_INJECTS OPTIONAL_PATTERN = re.compile(r"Optional\[([\w\.]*)\]") @@ -96,6 +97,9 @@ class NWBPydanticGenerator(PydanticGenerator): def _check_anyof( self, s: SlotDefinition, sn: SlotDefinitionName, sv: SchemaView ): # pragma: no cover + """ + Overridden to allow `array` in any_of + """ # Confirm that the original slot range (ignoring the default that comes in from # induced_slot) isn't in addition to setting any_of allowed_keys = ("array",) @@ -127,6 +131,10 @@ class NWBPydanticGenerator(PydanticGenerator): return slot + def after_generate_class(self, 
cls: ClassResult, sv: SchemaView) -> ClassResult: + cls = AfterGenerateClass.inject_dynamictable(cls) + return cls + def before_render_template(self, template: PydanticModule, sv: SchemaView) -> PydanticModule: if "source_file" in template.meta: del template.meta["source_file"] @@ -226,6 +234,33 @@ class AfterGenerateSlot: slot.imports = NamedImports return slot +class AfterGenerateClass: + """ + Container class for class-modification methods + """ + + @staticmethod + def inject_dynamictable(cls: ClassResult) -> ClassResult: + if cls.cls.name == "DynamicTable": + cls.cls.bases = ["DynamicTableMixin"] + + if cls.injected_classes is None: + cls.injected_classes = DYNAMIC_TABLE_INJECTS.copy() + else: + cls.injected_classes.extend(DYNAMIC_TABLE_INJECTS.copy()) + + if isinstance(cls.imports, Imports): + cls.imports += DYNAMIC_TABLE_IMPORTS + elif isinstance(cls.imports, list): + cls.imports = Imports(imports=cls.imports) + DYNAMIC_TABLE_IMPORTS + else: + cls.imports = DYNAMIC_TABLE_IMPORTS.model_copy() + elif cls.cls.name == "VectorData": + cls.cls.bases = ["VectorDataMixin"] + elif cls.cls.name == "VectorIndex": + cls.cls.bases = ["VectorIndexMixin"] + return cls + def compile_python( text_or_fn: str, package_path: Path = None, module_name: str = "test" diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 32647e7..fdbd355 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -1,39 +1,248 @@ """ Special types for mimicking HDMF special case behavior """ +from typing import Any, ClassVar, Dict, List, Optional, Union, Tuple, overload, TYPE_CHECKING -from typing import Any -from pydantic import BaseModel, ConfigDict +from linkml.generators.pydanticgen.template import Imports, Import, ObjectImport +from numpydantic import NDArray +from pandas import DataFrame +from pydantic import BaseModel, ConfigDict, Field, model_validator + +if TYPE_CHECKING: + from nwb_linkml.models import VectorData, VectorIndex class DynamicTableMixin(BaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes + + Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable` + but simplifying along the way :) """ model_config = ConfigDict(extra="allow") + __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] + NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ("name", "colnames", "description",) + + # overridden by subclass but implemented here for testing and typechecking purposes :) + colnames: List[str] = Field(default_factory=list) + + @property + def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + return { + k: getattr(self, k) for i, k in enumerate(self.colnames) + } + + @property + def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: + return [getattr(self, k) for i, k in enumerate(self.colnames)] + + @overload + def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... + + @overload + def __getitem__(self, item: int) -> DataFrame: ... + + @overload + def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... + + @overload + def __getitem__(self, item: Tuple[Union[int,slice], ...]) -> Union[DataFrame, list, "NDArray", "VectorData",]: ... + + @overload + def __getitem__(self, item: slice) -> DataFrame: ... + + def __getitem__(self, item: Union[str, int, slice, Tuple[int, Union[int, str]], Tuple[Union[int, slice], ...],]) -> Any: + """ + Get an item from the table + + If item is... 
+ + - ``str`` : get the column with this name + - ``int`` : get the row at this index + - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column + - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') + gets the 0th row from ``colname`` + - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. + returns as a :class:`pandas.DataFrame` + """ + if isinstance(item, str): + return self._columns[item] + if isinstance(item, (int, slice)): + return DataFrame.from_dict(self._slice_range(item)) + elif isinstance(item, tuple): + if len(item) != 2: + raise ValueError( + f"DynamicTables are 2-dimensional, can't index with more than 2 indices like {item}") + + # all other cases are tuples of (rows, cols) + rows, cols = item + if isinstance(cols, (int, slice)): + cols = self.colnames[cols] + data = self._slice_range(rows, cols) + return DataFrame.from_dict(data) + else: + raise ValueError(f"Unsure how to get item with key {item}") + + + def _slice_range(self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + if cols is None: + cols = self.colnames + elif isinstance(cols, str): + cols = [cols] + + data = { + k: self._columns[k][rows] for k in cols + } + return data - # @model_validator(mode='after') - # def ensure_equal_length(cls, model: 'DynamicTableMixin') -> 'DynamicTableMixin': - # """ - # Ensure all vectors are of equal length - # """ - # raise NotImplementedError('TODO') - # - # @model_validator(mode="after") - # def create_index_backrefs(cls, model: 'DynamicTableMixin') -> 'DynamicTableMixin': - # """ - # Ensure that vectordata with vectorindexes know about them - # """ - # raise NotImplementedError('TODO') - def __getitem__(self, item: str) -> Any: - raise NotImplementedError("TODO") def __setitem__(self, key: str, value: Any) -> None: raise NotImplementedError("TODO") + def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): + """ + Add a column, appending it to ``colnames`` + """ + # don't use this while building the model + if not getattr(self, '__pydantic_complete__', False): + return super().__setattr__(key, value) + + if key not in self.model_fields_set and not key.endswith('_index'): + self.colnames.append(key) + + return super().__setattr__(key, value) + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]): + """ + Construct colnames from arguments. 
+ + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if 'colnames' not in model: + colnames = [k for k in model.keys() + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith('_index')] + model['colnames'] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = [k for k in model.keys() if + k not in cls.NON_COLUMN_FIELDS + and not k.endswith('_index') + and k not in model['colnames'].keys() + ] + model['colnames'].extend(colnames) + return model + + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._columns.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex): + if field_name == f"{key}_index": + idx = field + break + elif field.target is col: + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.array[item] + + def __setitem__(self, key, value) -> None: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.array[key] = value + + +class VectorIndexMixin(BaseModel): + """ + Mixin class to give VectorIndex indexing abilities + """ + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + target: Optional["VectorData"] = None + + def _getitem_helper(self, arg: int): + """ + Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` + """ + + start = 0 if arg == 0 else self.array[arg - 1] + end = self.array[arg] + return self.target.array[slice(start, end)] + + def __getitem__(self, item: Union[int, slice]) -> Any: + if self.target is None: + return self.array[item] + elif type(self.target).__name__ == "VectorData": + if isinstance(item, int): + return self._getitem_helper(item) + else: + idx = range(*item.indices(len(self.array))) + return [self._getitem_helper(i) for i in idx] + else: + raise NotImplementedError("DynamicTableRange not supported yet") + + + def __setitem__(self, key, value) -> None: + if self._index: + # VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.array[key] = value + + +DYNAMIC_TABLE_IMPORTS = Imports( + imports = [ + Import(module="pandas", objects=[ObjectImport(name="DataFrame")]), + Import(module="typing", objects=[ObjectImport(name="ClassVar"), ObjectImport(name="overload"), ObjectImport(name="Tuple")]), + Import(module='numpydantic', objects=[ObjectImport(name='NDArray')]), + Import(module="pydantic", objects=[ObjectImport(name="model_validator")]) + ] +) +""" +Imports required for the dynamic table mixin + +VectorData is purposefully excluded as an import or an inject so that 
it will be +resolved to the VectorData definition in the generated module +""" +DYNAMIC_TABLE_INJECTS = [VectorDataMixin, VectorIndexMixin, DynamicTableMixin] # class VectorDataMixin(BaseModel): # index: Optional[BaseModel] = None diff --git a/nwb_linkml/src/nwb_linkml/io/schema.py b/nwb_linkml/src/nwb_linkml/io/schema.py index 3e2a76e..d5ce5c8 100644 --- a/nwb_linkml/src/nwb_linkml/io/schema.py +++ b/nwb_linkml/src/nwb_linkml/io/schema.py @@ -120,7 +120,7 @@ def load_namespace_adapter( return adapter -def load_nwb_core(core_version: str = "2.7.0", hdmf_version: str = "1.8.0") -> NamespacesAdapter: +def load_nwb_core(core_version: str = "2.7.0", hdmf_version: str = "1.8.0", hdmf_only:bool=False) -> NamespacesAdapter: """ Convenience function for loading the NWB core schema + hdmf-common as a namespace adapter. @@ -136,14 +136,18 @@ def load_nwb_core(core_version: str = "2.7.0", hdmf_version: str = "1.8.0") -> N Args: core_version (str): an entry in :attr:`.NWB_CORE_REPO.versions` hdmf_version (str): an entry in :attr:`.NWB_CORE_REPO.versions` + hdmf_only (bool): Only return the hdmf common schema Returns: """ # First get hdmf-common: hdmf_schema = load_namespace_adapter(HDMF_COMMON_REPO, version=hdmf_version) - schema = load_namespace_adapter(NWB_CORE_REPO, version=core_version) + if hdmf_only: + schema = hdmf_schema + else: + schema = load_namespace_adapter(NWB_CORE_REPO, version=core_version) - schema.imported.append(hdmf_schema) + schema.imported.append(hdmf_schema) return schema diff --git a/nwb_linkml/src/nwb_linkml/providers/linkml.py b/nwb_linkml/src/nwb_linkml/providers/linkml.py index 831bd2c..f868de7 100644 --- a/nwb_linkml/src/nwb_linkml/providers/linkml.py +++ b/nwb_linkml/src/nwb_linkml/providers/linkml.py @@ -5,6 +5,7 @@ Provider for LinkML schema built from NWB schema import shutil from pathlib import Path from typing import Dict, Optional, TypedDict +from dataclasses import dataclass from linkml_runtime import SchemaView from linkml_runtime.dumpers import yaml_dumper @@ -19,7 +20,8 @@ from nwb_linkml.ui import AdapterProgress from nwb_schema_language import Namespaces -class LinkMLSchemaBuild(TypedDict): +@dataclass +class LinkMLSchemaBuild: """Build result from :meth:`.LinkMLProvider.build`""" version: str diff --git a/nwb_linkml/tests/fixtures.py b/nwb_linkml/tests/fixtures.py index e4b8fae..092ba60 100644 --- a/nwb_linkml/tests/fixtures.py +++ b/nwb_linkml/tests/fixtures.py @@ -1,6 +1,7 @@ import shutil from dataclasses import dataclass, field from pathlib import Path +from types import ModuleType from typing import Dict, Optional import pytest @@ -14,6 +15,8 @@ from linkml_runtime.linkml_model import ( ) from nwb_linkml.adapters.namespaces import NamespacesAdapter +from nwb_linkml.providers import LinkMLProvider, PydanticProvider +from nwb_linkml.providers.linkml import LinkMLSchemaBuild from nwb_linkml.io import schema as io from nwb_schema_language import Attribute, Dataset, Group @@ -87,6 +90,26 @@ def nwb_core_fixture(request) -> NamespacesAdapter: return nwb_core +@pytest.fixture(scope="session") +def nwb_core_linkml(nwb_core_fixture, tmp_output_dir) -> LinkMLSchemaBuild: + provider = LinkMLProvider(tmp_output_dir, allow_repo=False, verbose=False) + result = provider.build(ns_adapter=nwb_core_fixture, force=True) + return result['core'] + + +@pytest.fixture(scope="session") +def nwb_core_module(nwb_core_linkml: LinkMLSchemaBuild, tmp_output_dir) -> ModuleType: + """ + Generated pydantic namespace from nwb core + """ + provider = PydanticProvider(tmp_output_dir, 
verbose=False) + result = provider.build(nwb_core_linkml.namespace, force=True) + mod = provider.get('core', version=nwb_core_linkml.version, allow_repo=False) + return mod + + + + @pytest.fixture(scope="session") def data_dir() -> Path: diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index 0024917..572a651 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -1,16 +1,19 @@ -from typing import Tuple +from typing import Tuple, TYPE_CHECKING +from types import ModuleType import numpy as np import pytest +# FIXME: Make this just be the output of the provider by patching into import machinery from nwb_linkml.models.pydantic.core.v2_7_0.namespace import ( ElectricalSeries, + ElectrodeGroup, NWBFileGeneralExtracellularEphysElectrodes, ) @pytest.fixture() -def electrical_series() -> Tuple[ElectricalSeries, NWBFileGeneralExtracellularEphysElectrodes]: +def electrical_series() -> Tuple["ElectricalSeries", "NWBFileGeneralExtracellularEphysElectrodes"]: """ Demo electrical series with adjoining electrodes """ @@ -19,9 +22,16 @@ def electrical_series() -> Tuple[ElectricalSeries, NWBFileGeneralExtracellularEp data = np.arange(0, n_electrodes * n_times).reshape(n_times, n_electrodes) timestamps = np.linspace(0, 1, n_times) + # electrode group is the physical description of the electrodes + electrode_group = ElectrodeGroup( + name="GroupA", + ) + # make electrodes tables electrodes = NWBFileGeneralExtracellularEphysElectrodes( id=np.arange(0, n_electrodes), x=np.arange(0, n_electrodes), y=np.arange(n_electrodes, n_electrodes * 2), + group=[electrode_group]*n_electrodes, + ) diff --git a/scripts/generate_core.py b/scripts/generate_core.py index e711e0e..b447e00 100644 --- a/scripts/generate_core.py +++ b/scripts/generate_core.py @@ -14,13 +14,13 @@ from rich import print from nwb_linkml.generators.pydantic import NWBPydanticGenerator from nwb_linkml.providers import LinkMLProvider, PydanticProvider -from nwb_linkml.providers.git import NWB_CORE_REPO, GitRepo +from nwb_linkml.providers.git import NWB_CORE_REPO, HDMF_COMMON_REPO, GitRepo from nwb_linkml.io import schema as io -def generate_core_yaml(output_path:Path, dry_run:bool=False): +def generate_core_yaml(output_path:Path, dry_run:bool=False, hdmf_only:bool=False): """Just build the latest version of the core schema""" - core = io.load_nwb_core() + core = io.load_nwb_core(hdmf_only=hdmf_only) built_schemas = core.build().schemas for schema in built_schemas: output_file = output_path / (schema.name + '.yaml') @@ -45,11 +45,10 @@ def generate_core_pydantic(yaml_path:Path, output_path:Path, dry_run:bool=False) with open(pydantic_file, 'w') as pfile: pfile.write(gen_pydantic) -def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False): +def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False, repo:GitRepo=NWB_CORE_REPO, hdmf_only=False): """ Generate linkml models for all versions """ - repo = GitRepo(NWB_CORE_REPO) #repo.clone(force=True) repo.clone() @@ -81,7 +80,7 @@ def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False): linkml_task = None pydantic_task = None - for version in NWB_CORE_REPO.versions: + for version in repo.namespace.versions: # build linkml try: # check out the version (this should also refresh the hdmf-common schema) @@ -91,9 +90,11 @@ def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False): # first load the core namespace core_ns = 
io.load_namespace_adapter(repo.namespace_file) - # then the hdmf-common namespace - hdmf_common_ns = io.load_namespace_adapter(repo.temp_directory / 'hdmf-common-schema' / 'common' / 'namespace.yaml') - core_ns.imported.append(hdmf_common_ns) + if repo.namespace == NWB_CORE_REPO: + # then the hdmf-common namespace + hdmf_common_ns = io.load_namespace_adapter(repo.temp_directory / 'hdmf-common-schema' / 'common' / 'namespace.yaml') + core_ns.imported.append(hdmf_common_ns) + build_progress.update(linkml_task, advance=1, action="Build LinkML") @@ -101,7 +102,7 @@ def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False): build_progress.update(linkml_task, advance=1, action="Built LinkML") # build pydantic - ns_files = [res['namespace'] for res in linkml_res.values()] + ns_files = [res.namespace for res in linkml_res.values()] pydantic_task = build_progress.add_task('', name=version, action='', total=len(ns_files)) for schema in ns_files: @@ -129,10 +130,20 @@ def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False): pydantic_task = None if not dry_run: - shutil.rmtree(yaml_path / 'linkml') - shutil.rmtree(pydantic_path / 'pydantic') - shutil.move(tmp_dir / 'linkml', yaml_path) - shutil.move(tmp_dir / 'pydantic', pydantic_path) + if hdmf_only: + shutil.rmtree(yaml_path / 'linkml' / 'hdmf_common') + shutil.rmtree(yaml_path / 'linkml' / 'hdmf_experimental') + shutil.rmtree(pydantic_path / 'pydantic' / 'hdmf_common') + shutil.rmtree(pydantic_path / 'pydantic' / 'hdmf_experimental') + shutil.move(tmp_dir / 'linkml' / 'hdmf_common', yaml_path / 'linkml') + shutil.move(tmp_dir / 'linkml' / 'hdmf_experimental', yaml_path / 'linkml') + shutil.move(tmp_dir / 'pydantic' / 'hdmf_common', pydantic_path / 'pydantic') + shutil.move(tmp_dir / 'pydantic' / 'hdmf_experimental', pydantic_path / 'pydantic') + else: + shutil.rmtree(yaml_path / 'linkml') + shutil.rmtree(pydantic_path / 'pydantic') + shutil.move(tmp_dir / 'linkml', yaml_path) + shutil.move(tmp_dir / 'pydantic', pydantic_path) # import the most recent version of the schemaz we built latest_version = sorted((pydantic_path / 'pydantic' / 'core').iterdir(), key=os.path.getmtime)[-1] @@ -167,6 +178,11 @@ def parser() -> ArgumentParser: type=Path, default=Path(__file__).parent.parent / 'nwb_linkml' / 'src' / 'nwb_linkml' / 'models' ) + parser.add_argument( + '--hdmf', + help="Only generate the HDMF namespaces", + action="store_true" + ) parser.add_argument( '--latest', help="Only generate the latest version of the core schemas.", @@ -182,14 +198,19 @@ def parser() -> ArgumentParser: def main(): args = parser().parse_args() + if args.hdmf: + repo = GitRepo(HDMF_COMMON_REPO) + else: + repo = GitRepo(NWB_CORE_REPO) + if not args.dry_run: args.yaml.mkdir(exist_ok=True) args.pydantic.mkdir(exist_ok=True) if args.latest: - generate_core_yaml(args.yaml, args.dry_run) + generate_core_yaml(args.yaml, args.dry_run, args.hdmf) generate_core_pydantic(args.yaml, args.pydantic, args.dry_run) else: - generate_versions(args.yaml, args.pydantic, args.dry_run) + generate_versions(args.yaml, args.pydantic, args.dry_run, repo, args.hdmf) if __name__ == "__main__": main() From abf1b0e6c015b9dd0dddd3f75de49cd53fdfa6f9 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 31 Jul 2024 01:17:39 -0700 Subject: [PATCH 02/61] update hdmf models/schema --- .../src/nwb_linkml/generators/pydantic.py | 1 + nwb_linkml/src/nwb_linkml/includes/hdmf.py | 94 ++-- nwb_linkml/src/nwb_linkml/io/schema.py | 4 +- 
.../hdmf_common/v1_1_0/hdmf_common_table.py | 246 +++++++++- .../hdmf_common/v1_1_2/hdmf_common_table.py | 246 +++++++++- .../hdmf_common/v1_1_3/hdmf_common_table.py | 246 +++++++++- .../pydantic/hdmf_common/v1_2_0/__init__.py | 1 + .../hdmf_common/v1_2_0/hdmf_common_base.py | 88 ++++ .../hdmf_common/v1_2_0/hdmf_common_sparse.py | 125 +++++ .../hdmf_common/v1_2_0/hdmf_common_table.py | 449 +++++++++++++++++ .../pydantic/hdmf_common/v1_2_0/namespace.py | 83 ++++ .../pydantic/hdmf_common/v1_2_1/__init__.py | 1 + .../hdmf_common/v1_2_1/hdmf_common_base.py | 104 ++++ .../hdmf_common/v1_2_1/hdmf_common_sparse.py | 126 +++++ .../hdmf_common/v1_2_1/hdmf_common_table.py | 449 +++++++++++++++++ .../pydantic/hdmf_common/v1_2_1/namespace.py | 83 ++++ .../pydantic/hdmf_common/v1_3_0/__init__.py | 1 + .../hdmf_common/v1_3_0/hdmf_common_base.py | 104 ++++ .../v1_3_0/hdmf_common_resources.py | 181 +++++++ .../hdmf_common/v1_3_0/hdmf_common_sparse.py | 110 +++++ .../hdmf_common/v1_3_0/hdmf_common_table.py | 449 +++++++++++++++++ .../pydantic/hdmf_common/v1_3_0/namespace.py | 86 ++++ .../pydantic/hdmf_common/v1_4_0/__init__.py | 1 + .../hdmf_common/v1_4_0/hdmf_common_base.py | 104 ++++ .../hdmf_common/v1_4_0/hdmf_common_sparse.py | 110 +++++ .../hdmf_common/v1_4_0/hdmf_common_table.py | 422 ++++++++++++++++ .../pydantic/hdmf_common/v1_4_0/namespace.py | 77 +++ .../hdmf_common/v1_5_0/hdmf_common_table.py | 246 +++++++++- .../pydantic/hdmf_common/v1_5_1/__init__.py | 1 + .../hdmf_common/v1_5_1/hdmf_common_base.py | 104 ++++ .../hdmf_common/v1_5_1/hdmf_common_sparse.py | 110 +++++ .../hdmf_common/v1_5_1/hdmf_common_table.py | 453 ++++++++++++++++++ .../pydantic/hdmf_common/v1_5_1/namespace.py | 78 +++ .../pydantic/hdmf_common/v1_6_0/__init__.py | 1 + .../hdmf_common/v1_6_0/hdmf_common_base.py | 104 ++++ .../hdmf_common/v1_6_0/hdmf_common_sparse.py | 110 +++++ .../hdmf_common/v1_6_0/hdmf_common_table.py | 453 ++++++++++++++++++ .../pydantic/hdmf_common/v1_6_0/namespace.py | 78 +++ .../pydantic/hdmf_common/v1_7_0/__init__.py | 1 + .../hdmf_common/v1_7_0/hdmf_common_base.py | 104 ++++ .../hdmf_common/v1_7_0/hdmf_common_sparse.py | 110 +++++ .../hdmf_common/v1_7_0/hdmf_common_table.py | 453 ++++++++++++++++++ .../pydantic/hdmf_common/v1_7_0/namespace.py | 78 +++ .../hdmf_common/v1_8_0/hdmf_common_table.py | 246 +++++++++- .../v0_1_0/hdmf_experimental_experimental.py | 4 +- .../v0_1_0/hdmf_experimental_resources.py | 4 +- .../hdmf_experimental/v0_1_0/namespace.py | 7 +- .../hdmf_experimental/v0_2_0/__init__.py | 1 + .../v0_2_0/hdmf_experimental_experimental.py | 93 ++++ .../v0_2_0/hdmf_experimental_resources.py | 199 ++++++++ .../hdmf_experimental/v0_2_0/namespace.py | 89 ++++ .../hdmf_experimental/v0_3_0/__init__.py | 1 + .../v0_3_0/hdmf_experimental_experimental.py | 93 ++++ .../v0_3_0/hdmf_experimental_resources.py | 207 ++++++++ .../hdmf_experimental/v0_3_0/namespace.py | 89 ++++ .../hdmf_experimental/v0_4_0/__init__.py | 1 + .../v0_4_0/hdmf_experimental_experimental.py | 93 ++++ .../v0_4_0/hdmf_experimental_resources.py | 229 +++++++++ .../hdmf_experimental/v0_4_0/namespace.py | 90 ++++ .../v1_1_0/hdmf-common.nwb.language.yaml | 2 +- .../v1_1_2/hdmf-common.nwb.language.yaml | 2 +- .../v1_1_3/hdmf-common.nwb.language.yaml | 2 +- .../hdmf_common/v1_2_0/hdmf-common.base.yaml | 33 ++ .../v1_2_0/hdmf-common.nwb.language.yaml | 109 +++++ .../v1_2_0/hdmf-common.sparse.yaml | 75 +++ .../hdmf_common/v1_2_0/hdmf-common.table.yaml | 181 +++++++ .../linkml/hdmf_common/v1_2_0/namespace.yaml | 17 + 
.../hdmf_common/v1_2_1/hdmf-common.base.yaml | 46 ++ .../v1_2_1/hdmf-common.nwb.language.yaml | 109 +++++ .../v1_2_1/hdmf-common.sparse.yaml | 77 +++ .../hdmf_common/v1_2_1/hdmf-common.table.yaml | 181 +++++++ .../linkml/hdmf_common/v1_2_1/namespace.yaml | 17 + .../hdmf_common/v1_3_0/hdmf-common.base.yaml | 46 ++ .../v1_3_0/hdmf-common.nwb.language.yaml | 109 +++++ .../v1_3_0/hdmf-common.resources.yaml | 158 ++++++ .../v1_3_0/hdmf-common.sparse.yaml | 66 +++ .../hdmf_common/v1_3_0/hdmf-common.table.yaml | 181 +++++++ .../linkml/hdmf_common/v1_3_0/namespace.yaml | 18 + .../hdmf_common/v1_4_0/hdmf-common.base.yaml | 46 ++ .../v1_4_0/hdmf-common.nwb.language.yaml | 109 +++++ .../v1_4_0/hdmf-common.sparse.yaml | 66 +++ .../hdmf_common/v1_4_0/hdmf-common.table.yaml | 166 +++++++ .../linkml/hdmf_common/v1_4_0/namespace.yaml | 17 + .../v1_5_0/hdmf-common.nwb.language.yaml | 2 +- .../hdmf_common/v1_5_1/hdmf-common.base.yaml | 46 ++ .../v1_5_1/hdmf-common.nwb.language.yaml | 109 +++++ .../v1_5_1/hdmf-common.sparse.yaml | 66 +++ .../hdmf_common/v1_5_1/hdmf-common.table.yaml | 185 +++++++ .../linkml/hdmf_common/v1_5_1/namespace.yaml | 17 + .../hdmf_common/v1_6_0/hdmf-common.base.yaml | 46 ++ .../v1_6_0/hdmf-common.nwb.language.yaml | 109 +++++ .../v1_6_0/hdmf-common.sparse.yaml | 66 +++ .../hdmf_common/v1_6_0/hdmf-common.table.yaml | 185 +++++++ .../linkml/hdmf_common/v1_6_0/namespace.yaml | 17 + .../hdmf_common/v1_7_0/hdmf-common.base.yaml | 46 ++ .../v1_7_0/hdmf-common.nwb.language.yaml | 109 +++++ .../v1_7_0/hdmf-common.sparse.yaml | 66 +++ .../hdmf_common/v1_7_0/hdmf-common.table.yaml | 185 +++++++ .../linkml/hdmf_common/v1_7_0/namespace.yaml | 17 + .../v1_8_0/hdmf-common.nwb.language.yaml | 2 +- .../hdmf-experimental.experimental.yaml | 2 +- .../hdmf-experimental.nwb.language.yaml | 2 +- .../v0_1_0/hdmf-experimental.resources.yaml | 2 +- .../hdmf-experimental.experimental.yaml | 31 ++ .../hdmf-experimental.nwb.language.yaml | 109 +++++ .../v0_2_0/hdmf-experimental.resources.yaml | 196 ++++++++ .../hdmf_experimental/v0_2_0/namespace.yaml | 17 + .../hdmf-experimental.experimental.yaml | 31 ++ .../hdmf-experimental.nwb.language.yaml | 109 +++++ .../v0_3_0/hdmf-experimental.resources.yaml | 199 ++++++++ .../hdmf_experimental/v0_3_0/namespace.yaml | 17 + .../hdmf-experimental.experimental.yaml | 31 ++ .../hdmf-experimental.nwb.language.yaml | 109 +++++ .../v0_4_0/hdmf-experimental.resources.yaml | 222 +++++++++ .../hdmf_experimental/v0_4_0/namespace.yaml | 17 + .../hdmf-experimental.nwb.language.yaml | 2 +- nwb_linkml/tests/fixtures.py | 8 +- nwb_linkml/tests/test_includes/test_hdmf.py | 3 +- 118 files changed, 12050 insertions(+), 84 deletions(-) create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py create mode 100644 
nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py create mode 100644 
nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml 
create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py index 3ecf605..0cc613d 100644 --- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py +++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py @@ -234,6 +234,7 @@ class AfterGenerateSlot: slot.imports = NamedImports return slot + class AfterGenerateClass: """ Container class for class-modification methods diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index fdbd355..c86499b 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -1,6 +1,7 @@ """ Special types for mimicking HDMF special case behavior """ + from typing import Any, ClassVar, Dict, List, Optional, Union, Tuple, overload, TYPE_CHECKING @@ -23,16 +24,18 @@ class DynamicTableMixin(BaseModel): model_config = ConfigDict(extra="allow") __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] - NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ("name", "colnames", "description",) + NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "name", + "colnames", + "description", + ) # overridden by subclass but implemented here for testing and typechecking purposes :) colnames: List[str] = Field(default_factory=list) @property def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - return { - k: getattr(self, k) for i, k in enumerate(self.colnames) - } + return {k: getattr(self, k) for i, k in enumerate(self.colnames)} @property def _columns_list(self) -> List[Union[list, "NDArray", 
"VectorData"]]: @@ -48,12 +51,26 @@ class DynamicTableMixin(BaseModel): def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... @overload - def __getitem__(self, item: Tuple[Union[int,slice], ...]) -> Union[DataFrame, list, "NDArray", "VectorData",]: ... + def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ + DataFrame, + list, + "NDArray", + "VectorData", + ]: ... @overload def __getitem__(self, item: slice) -> DataFrame: ... - def __getitem__(self, item: Union[str, int, slice, Tuple[int, Union[int, str]], Tuple[Union[int, slice], ...],]) -> Any: + def __getitem__( + self, + item: Union[ + str, + int, + slice, + Tuple[int, Union[int, str]], + Tuple[Union[int, slice], ...], + ], + ) -> Any: """ Get an item from the table @@ -74,7 +91,9 @@ class DynamicTableMixin(BaseModel): elif isinstance(item, tuple): if len(item) != 2: raise ValueError( - f"DynamicTables are 2-dimensional, can't index with more than 2 indices like {item}") + "DynamicTables are 2-dimensional, can't index with more than 2 indices like" + f" {item}" + ) # all other cases are tuples of (rows, cols) rows, cols = item @@ -85,20 +104,17 @@ class DynamicTableMixin(BaseModel): else: raise ValueError(f"Unsure how to get item with key {item}") - - def _slice_range(self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + def _slice_range( + self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: if cols is None: cols = self.colnames elif isinstance(cols, str): cols = [cols] - data = { - k: self._columns[k][rows] for k in cols - } + data = {k: self._columns[k][rows] for k in cols} return data - - def __setitem__(self, key: str, value: Any) -> None: raise NotImplementedError("TODO") @@ -107,10 +123,10 @@ class DynamicTableMixin(BaseModel): Add a column, appending it to ``colnames`` """ # don't use this while building the model - if not getattr(self, '__pydantic_complete__', False): + if not getattr(self, "__pydantic_complete__", False): return super().__setattr__(key, value) - if key not in self.model_fields_set and not key.endswith('_index'): + if key not in self.model_fields_set and not key.endswith("_index"): self.colnames.append(key) return super().__setattr__(key, value) @@ -124,19 +140,23 @@ class DynamicTableMixin(BaseModel): the model dict is ordered after python3.6, so we can use that minus anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ - if 'colnames' not in model: - colnames = [k for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS - and not k.endswith('_index')] - model['colnames'] = colnames + if "colnames" not in model: + colnames = [ + k + for k in model.keys() + if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + ] + model["colnames"] = colnames else: # add any columns not explicitly given an order at the end - colnames = [k for k in model.keys() if - k not in cls.NON_COLUMN_FIELDS - and not k.endswith('_index') - and k not in model['colnames'].keys() - ] - model['colnames'].extend(colnames) + colnames = [ + k + for k in model.keys() + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"].keys() + ] + model["colnames"].extend(colnames) return model @model_validator(mode="after") @@ -164,12 +184,11 @@ class DynamicTableMixin(BaseModel): return self - - class VectorDataMixin(BaseModel): """ Mixin class to give VectorData indexing abilities 
""" + _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking @@ -194,6 +213,7 @@ class VectorIndexMixin(BaseModel): """ Mixin class to give VectorIndex indexing abilities """ + # redefined in `VectorData`, but included here for testing and type checking array: Optional[NDArray] = None target: Optional["VectorData"] = None @@ -219,7 +239,6 @@ class VectorIndexMixin(BaseModel): else: raise NotImplementedError("DynamicTableRange not supported yet") - def __setitem__(self, key, value) -> None: if self._index: # VectorIndex is the thing that knows how to do the slicing @@ -229,11 +248,18 @@ class VectorIndexMixin(BaseModel): DYNAMIC_TABLE_IMPORTS = Imports( - imports = [ + imports=[ Import(module="pandas", objects=[ObjectImport(name="DataFrame")]), - Import(module="typing", objects=[ObjectImport(name="ClassVar"), ObjectImport(name="overload"), ObjectImport(name="Tuple")]), - Import(module='numpydantic', objects=[ObjectImport(name='NDArray')]), - Import(module="pydantic", objects=[ObjectImport(name="model_validator")]) + Import( + module="typing", + objects=[ + ObjectImport(name="ClassVar"), + ObjectImport(name="overload"), + ObjectImport(name="Tuple"), + ], + ), + Import(module="numpydantic", objects=[ObjectImport(name="NDArray")]), + Import(module="pydantic", objects=[ObjectImport(name="model_validator")]), ] ) """ diff --git a/nwb_linkml/src/nwb_linkml/io/schema.py b/nwb_linkml/src/nwb_linkml/io/schema.py index d5ce5c8..a162856 100644 --- a/nwb_linkml/src/nwb_linkml/io/schema.py +++ b/nwb_linkml/src/nwb_linkml/io/schema.py @@ -120,7 +120,9 @@ def load_namespace_adapter( return adapter -def load_nwb_core(core_version: str = "2.7.0", hdmf_version: str = "1.8.0", hdmf_only:bool=False) -> NamespacesAdapter: +def load_nwb_core( + core_version: str = "2.7.0", hdmf_version: str = "1.8.0", hdmf_only: bool = False +) -> NamespacesAdapter: """ Convenience function for loading the NWB core schema + hdmf-common as a namespace adapter. 
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index ffd4424..c6e8f35 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -4,10 +4,11 @@ from decimal import Decimal from enum import Enum import re import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np +from pandas import DataFrame +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple from numpydantic import NDArray, Shape +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" version = "1.1.0" @@ -46,6 +47,241 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.array[item] + + def __setitem__(self, key, value) -> None: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.array[key] = value + + +class VectorIndexMixin(BaseModel): + """ + Mixin class to give VectorIndex indexing abilities + """ + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + target: Optional["VectorData"] = None + + def _getitem_helper(self, arg: int): + """ + Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` + """ + + start = 0 if arg == 0 else self.array[arg - 1] + end = self.array[arg] + return self.target.array[slice(start, end)] + + def __getitem__(self, item: Union[int, slice]) -> Any: + if self.target is None: + return self.array[item] + elif type(self.target).__name__ == "VectorData": + if isinstance(item, int): + return self._getitem_helper(item) + else: + idx = range(*item.indices(len(self.array))) + return [self._getitem_helper(i) for i in idx] + else: + raise NotImplementedError("DynamicTableRange not supported yet") + + def __setitem__(self, key, value) -> None: + if self._index: + # VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.array[key] = value + + +class DynamicTableMixin(BaseModel): + """ + Mixin to make DynamicTable subclasses behave like tables/dataframes + + Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable` + but simplifying along the way :) + """ + + model_config = ConfigDict(extra="allow") + __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] + NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "name", + "colnames", + "description", + ) + + # overridden by subclass but implemented here for testing and typechecking purposes :) + colnames: List[str] = Field(default_factory=list) + + @property + def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + return {k: getattr(self, k) for i, k in 
enumerate(self.colnames)} + + @property + def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: + return [getattr(self, k) for i, k in enumerate(self.colnames)] + + @overload + def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... + + @overload + def __getitem__(self, item: int) -> DataFrame: ... + + @overload + def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... + + @overload + def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ + DataFrame, + list, + "NDArray", + "VectorData", + ]: ... + + @overload + def __getitem__(self, item: slice) -> DataFrame: ... + + def __getitem__( + self, + item: Union[ + str, + int, + slice, + Tuple[int, Union[int, str]], + Tuple[Union[int, slice], ...], + ], + ) -> Any: + """ + Get an item from the table + + If item is... + + - ``str`` : get the column with this name + - ``int`` : get the row at this index + - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column + - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') + gets the 0th row from ``colname`` + - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. + returns as a :class:`pandas.DataFrame` + """ + if isinstance(item, str): + return self._columns[item] + if isinstance(item, (int, slice)): + return DataFrame.from_dict(self._slice_range(item)) + elif isinstance(item, tuple): + if len(item) != 2: + raise ValueError( + "DynamicTables are 2-dimensional, can't index with more than 2 indices like" + f" {item}" + ) + + # all other cases are tuples of (rows, cols) + rows, cols = item + if isinstance(cols, (int, slice)): + cols = self.colnames[cols] + data = self._slice_range(rows, cols) + return DataFrame.from_dict(data) + else: + raise ValueError(f"Unsure how to get item with key {item}") + + def _slice_range( + self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + if cols is None: + cols = self.colnames + elif isinstance(cols, str): + cols = [cols] + + data = {k: self._columns[k][rows] for k in cols} + return data + + def __setitem__(self, key: str, value: Any) -> None: + raise NotImplementedError("TODO") + + def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): + """ + Add a column, appending it to ``colnames`` + """ + # don't use this while building the model + if not getattr(self, "__pydantic_complete__", False): + return super().__setattr__(key, value) + + if key not in self.model_fields_set and not key.endswith("_index"): + self.colnames.append(key) + + return super().__setattr__(key, value) + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]): + """ + Construct colnames from arguments. 
+ + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "colnames" not in model: + colnames = [ + k + for k in model.keys() + if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + ] + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = [ + k + for k in model.keys() + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"].keys() + ] + model["colnames"].extend(colnames) + return model + + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._columns.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex): + if field_name == f"{key}_index": + idx = field + break + elif field.target is col: + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + linkml_meta = LinkMLMeta( { "annotations": { @@ -87,7 +323,7 @@ class Index(Data): ) -class VectorData(Data): +class VectorData(VectorDataMixin): """ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex(0)+1]. The second vector is at VectorData[VectorIndex(0)+1:VectorIndex(1)+1], and so on. """ @@ -102,7 +338,7 @@ class VectorData(Data): ) -class VectorIndex(Index): +class VectorIndex(VectorIndexMixin): """ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. """ @@ -161,7 +397,7 @@ class Container(ConfiguredBaseModel): name: str = Field(...) -class DynamicTable(Container): +class DynamicTable(DynamicTableMixin): """ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). Apart from a column that contains unique identifiers for each row there are no other required datasets. Users are free to add any number of VectorData objects here. Table functionality is already supported through compound types, which is analogous to storing an array-of-structs. DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. 
For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. For example, DynamicTable was originally developed for storing trial data and spike unit metadata. Both of these use cases are expected to produce relatively small tables, so the spatial locality of multiple datasets present in a DynamicTable is not expected to have a significant performance impact. Additionally, requirements of trial and unit metadata tables are sufficiently diverse that performance implications can be overlooked in favor of usability. """ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 0b75bec..44209ba 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -4,10 +4,11 @@ from decimal import Decimal from enum import Enum import re import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np +from pandas import DataFrame +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple from numpydantic import NDArray, Shape +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator metamodel_version = "None" version = "1.1.2" @@ -46,6 +47,241 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.array[item] + + def __setitem__(self, key, value) -> None: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.array[key] = value + + +class VectorIndexMixin(BaseModel): + """ + Mixin class to give VectorIndex indexing abilities + """ + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + target: Optional["VectorData"] = None + + def _getitem_helper(self, arg: int): + """ + Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` + """ + + start = 0 if arg == 0 else self.array[arg - 1] + end = self.array[arg] + return self.target.array[slice(start, end)] + + def __getitem__(self, item: Union[int, slice]) -> Any: + if self.target is None: + return self.array[item] + elif type(self.target).__name__ == "VectorData": + if isinstance(item, int): + return self._getitem_helper(item) + else: + idx = range(*item.indices(len(self.array))) + return [self._getitem_helper(i) for i in idx] + else: + raise NotImplementedError("DynamicTableRange not supported yet") + + def __setitem__(self, key, value) -> None: + if self._index: + # VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.array[key] = value + + +class DynamicTableMixin(BaseModel): + """ + Mixin to make DynamicTable subclasses behave like tables/dataframes + + Mimicking some of the behavior 
from :class:`hdmf.common.table.DynamicTable` + but simplifying along the way :) + """ + + model_config = ConfigDict(extra="allow") + __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] + NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "name", + "colnames", + "description", + ) + + # overridden by subclass but implemented here for testing and typechecking purposes :) + colnames: List[str] = Field(default_factory=list) + + @property + def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + return {k: getattr(self, k) for i, k in enumerate(self.colnames)} + + @property + def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: + return [getattr(self, k) for i, k in enumerate(self.colnames)] + + @overload + def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... + + @overload + def __getitem__(self, item: int) -> DataFrame: ... + + @overload + def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... + + @overload + def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ + DataFrame, + list, + "NDArray", + "VectorData", + ]: ... + + @overload + def __getitem__(self, item: slice) -> DataFrame: ... + + def __getitem__( + self, + item: Union[ + str, + int, + slice, + Tuple[int, Union[int, str]], + Tuple[Union[int, slice], ...], + ], + ) -> Any: + """ + Get an item from the table + + If item is... + + - ``str`` : get the column with this name + - ``int`` : get the row at this index + - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column + - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') + gets the 0th row from ``colname`` + - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. + returns as a :class:`pandas.DataFrame` + """ + if isinstance(item, str): + return self._columns[item] + if isinstance(item, (int, slice)): + return DataFrame.from_dict(self._slice_range(item)) + elif isinstance(item, tuple): + if len(item) != 2: + raise ValueError( + "DynamicTables are 2-dimensional, can't index with more than 2 indices like" + f" {item}" + ) + + # all other cases are tuples of (rows, cols) + rows, cols = item + if isinstance(cols, (int, slice)): + cols = self.colnames[cols] + data = self._slice_range(rows, cols) + return DataFrame.from_dict(data) + else: + raise ValueError(f"Unsure how to get item with key {item}") + + def _slice_range( + self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + if cols is None: + cols = self.colnames + elif isinstance(cols, str): + cols = [cols] + + data = {k: self._columns[k][rows] for k in cols} + return data + + def __setitem__(self, key: str, value: Any) -> None: + raise NotImplementedError("TODO") + + def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): + """ + Add a column, appending it to ``colnames`` + """ + # don't use this while building the model + if not getattr(self, "__pydantic_complete__", False): + return super().__setattr__(key, value) + + if key not in self.model_fields_set and not key.endswith("_index"): + self.colnames.append(key) + + return super().__setattr__(key, value) + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]): + """ + Construct colnames from arguments. 
+
+        Model dicts preserve insertion order (guaranteed since Python 3.7), so we can
+        use the order the arguments were passed in, minus anything in
+        :attr:`.NON_COLUMN_FIELDS`, as the implied column order
+        """
+        if "colnames" not in model:
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
+            ]
+            model["colnames"] = colnames
+        else:
+            # add any columns not explicitly given an order at the end.
+            # `model["colnames"]` is a list, so check membership in it directly
+            # rather than calling `.keys()` on it
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS
+                and not k.endswith("_index")
+                and k not in model["colnames"]
+            ]
+            model["colnames"].extend(colnames)
+        return model
+
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "DynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._columns.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex):
+                        if field_name == f"{key}_index":
+                            idx = field
+                            break
+                        elif field.target is col:
+                            idx = field
+                            break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
+
+
 linkml_meta = LinkMLMeta(
     {
         "annotations": {
@@ -87,7 +323,7 @@ class Index(Data):
     )
 
 
-class VectorData(Data):
+class VectorData(VectorDataMixin):
     """
     An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex(0)+1]. The second vector is at VectorData[VectorIndex(0)+1:VectorIndex(1)+1], and so on.
     """
@@ -102,7 +338,7 @@ class VectorData(Data):
     )
 
 
-class VectorIndex(Index):
+class VectorIndex(VectorIndexMixin):
     """
     Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData.
     """
@@ -161,7 +397,7 @@ class Container(ConfiguredBaseModel):
     name: str = Field(...)
 
 
-class DynamicTable(Container):
+class DynamicTable(DynamicTableMixin):
     """
     A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). Apart from a column that contains unique identifiers for each row there are no other required datasets. Users are free to add any number of VectorData objects here. Table functionality is already supported through compound types, which is analogous to storing an array-of-structs. DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account.
For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. For example, DynamicTable was originally developed for storing trial data and spike unit metadata. Both of these use cases are expected to produce relatively small tables, so the spatial locality of multiple datasets present in a DynamicTable is not expected to have a significant performance impact. Additionally, requirements of trial and unit metadata tables are sufficiently diverse that performance implications can be overlooked in favor of usability.
     """
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py
index ae84bd1..703ff6c 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py
@@ -4,10 +4,11 @@ from decimal import Decimal
 from enum import Enum
 import re
 import sys
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
 import numpy as np
+from pandas import DataFrame
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
 from numpydantic import NDArray, Shape
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
 
 metamodel_version = "None"
 version = "1.1.3"
@@ -46,6 +47,241 @@ class LinkMLMeta(RootModel):
 
 NUMPYDANTIC_VERSION = "1.2.1"
 
+
+
+class VectorDataMixin(BaseModel):
+    """
+    Mixin class to give VectorData indexing abilities
+    """
+
+    _index: Optional["VectorIndex"] = None
+
+    # redefined in `VectorData`, but included here for testing and type checking
+    array: Optional[NDArray] = None
+
+    def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            return self._index[item]
+        else:
+            return self.array[item]
+
+    def __setitem__(self, key, value) -> None:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            self._index[key] = value
+        else:
+            self.array[key] = value
+
+
+class VectorIndexMixin(BaseModel):
+    """
+    Mixin class to give VectorIndex indexing abilities
+    """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+    array: Optional[NDArray] = None
+    target: Optional["VectorData"] = None
+
+    def _getitem_helper(self, arg: int):
+        """
+        Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+        """
+
+        start = 0 if arg == 0 else self.array[arg - 1]
+        end = self.array[arg]
+        return self.target.array[slice(start, end)]
+
+    def __getitem__(self, item: Union[int, slice]) -> Any:
+        if self.target is None:
+            return self.array[item]
+        elif type(self.target).__name__ == "VectorData":
+            if isinstance(item, int):
+                return self._getitem_helper(item)
+            else:
+                idx = range(*item.indices(len(self.array)))
+                return [self._getitem_helper(i) for i in idx]
+        else:
+            raise NotImplementedError("DynamicTableRange not supported yet")
+
+    def __setitem__(self, key, value) -> None:
+        if self.target is None:
+            self.array[key] = value
+        else:
+            # a VectorIndex has no `_index` of its own, so writing through the
+            # index into the target's ragged cells is not supported yet
+            raise NotImplementedError("Setting values through a VectorIndex is not yet supported")
+
+
+class DynamicTableMixin(BaseModel):
+    """
+    Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+    Mimicking some of the behavior
from :class:`hdmf.common.table.DynamicTable` + but simplifying along the way :) + """ + + model_config = ConfigDict(extra="allow") + __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] + NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "name", + "colnames", + "description", + ) + + # overridden by subclass but implemented here for testing and typechecking purposes :) + colnames: List[str] = Field(default_factory=list) + + @property + def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + return {k: getattr(self, k) for i, k in enumerate(self.colnames)} + + @property + def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: + return [getattr(self, k) for i, k in enumerate(self.colnames)] + + @overload + def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... + + @overload + def __getitem__(self, item: int) -> DataFrame: ... + + @overload + def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... + + @overload + def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ + DataFrame, + list, + "NDArray", + "VectorData", + ]: ... + + @overload + def __getitem__(self, item: slice) -> DataFrame: ... + + def __getitem__( + self, + item: Union[ + str, + int, + slice, + Tuple[int, Union[int, str]], + Tuple[Union[int, slice], ...], + ], + ) -> Any: + """ + Get an item from the table + + If item is... + + - ``str`` : get the column with this name + - ``int`` : get the row at this index + - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column + - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') + gets the 0th row from ``colname`` + - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. + returns as a :class:`pandas.DataFrame` + """ + if isinstance(item, str): + return self._columns[item] + if isinstance(item, (int, slice)): + return DataFrame.from_dict(self._slice_range(item)) + elif isinstance(item, tuple): + if len(item) != 2: + raise ValueError( + "DynamicTables are 2-dimensional, can't index with more than 2 indices like" + f" {item}" + ) + + # all other cases are tuples of (rows, cols) + rows, cols = item + if isinstance(cols, (int, slice)): + cols = self.colnames[cols] + data = self._slice_range(rows, cols) + return DataFrame.from_dict(data) + else: + raise ValueError(f"Unsure how to get item with key {item}") + + def _slice_range( + self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + if cols is None: + cols = self.colnames + elif isinstance(cols, str): + cols = [cols] + + data = {k: self._columns[k][rows] for k in cols} + return data + + def __setitem__(self, key: str, value: Any) -> None: + raise NotImplementedError("TODO") + + def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): + """ + Add a column, appending it to ``colnames`` + """ + # don't use this while building the model + if not getattr(self, "__pydantic_complete__", False): + return super().__setattr__(key, value) + + if key not in self.model_fields_set and not key.endswith("_index"): + self.colnames.append(key) + + return super().__setattr__(key, value) + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]): + """ + Construct colnames from arguments. 
+
+        Model dicts preserve insertion order (guaranteed since Python 3.7), so we can
+        use the order the arguments were passed in, minus anything in
+        :attr:`.NON_COLUMN_FIELDS`, as the implied column order
+        """
+        if "colnames" not in model:
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
+            ]
+            model["colnames"] = colnames
+        else:
+            # add any columns not explicitly given an order at the end.
+            # `model["colnames"]` is a list, so check membership in it directly
+            # rather than calling `.keys()` on it
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS
+                and not k.endswith("_index")
+                and k not in model["colnames"]
+            ]
+            model["colnames"].extend(colnames)
+        return model
+
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "DynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._columns.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex):
+                        if field_name == f"{key}_index":
+                            idx = field
+                            break
+                        elif field.target is col:
+                            idx = field
+                            break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
+
+
 linkml_meta = LinkMLMeta(
     {
         "annotations": {
@@ -87,7 +323,7 @@ class Index(Data):
     )
 
 
-class VectorData(Data):
+class VectorData(VectorDataMixin):
     """
     An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex(0)+1]. The second vector is at VectorData[VectorIndex(0)+1:VectorIndex(1)+1], and so on.
     """
@@ -110,7 +346,7 @@ class VectorData(Data):
     ] = Field(None)
 
 
-class VectorIndex(Index):
+class VectorIndex(VectorIndexMixin):
     """
     Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData.
     """
@@ -180,7 +416,7 @@ class Container(ConfiguredBaseModel):
     name: str = Field(...)
 
 
-class DynamicTable(Container):
+class DynamicTable(DynamicTableMixin):
     """
     A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). Apart from a column that contains unique identifiers for each row there are no other required datasets. Users are free to add any number of VectorData objects here. Table functionality is already supported through compound types, which is analogous to storing an array-of-structs. DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account.
For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. For example, DynamicTable was originally developed for storing trial data and spike unit metadata. Both of these use cases are expected to produce relatively small tables, so the spatial locality of multiple datasets present in a DynamicTable is not expected to have a significant performance impact. Additionally, requirements of trial and unit metadata tables are sufficiently diverse that performance implications can be overlooked in favor of usability. """ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py new file mode 100644 index 0000000..1d657d9 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py @@ -0,0 +1,88 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.2.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.base/", + "id": "hdmf-common.base", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.base", + } +) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) 
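The `VectorDataMixin`/`VectorIndexMixin` pair added above reproduces hdmf's ragged-array indexing: the index array stores the end offset of each cell, and `_getitem_helper` turns that into a slice of the flat target array. A rough sketch of the behavior, assuming the generated v1.1.3 classes accept plain numpy arrays for `array` (as their `NDArray` annotations suggest):

```python
import numpy as np

from nwb_linkml.models.pydantic.hdmf_common.v1_1_3.hdmf_common_table import (
    VectorData,
    VectorIndex,
)

# three ragged "cells" of lengths 3, 2, and 1, stored flat in one VectorData
data = VectorData(name="spike_times", description="flat storage", array=np.arange(6))
index = VectorIndex(
    name="spike_times_index",
    description="end offset of each ragged cell",
    target=data,
    array=np.array([3, 5, 6]),
)
data._index = index  # the backlink that resolve_targets() creates automatically

index[0]    # rows 0:3 of the target -> array([0, 1, 2])
index[1]    # rows 3:5 -> array([3, 4])
index[0:3]  # a slice yields a list of all three cells
data[2]     # VectorData delegates to its index -> array([5])
```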
+ + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py new file mode 100644 index 0000000..31afdb0 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py @@ -0,0 +1,125 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.2.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.sparse/", + "id": "hdmf-common.sparse", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.sparse", + } +) + + +class CSRMatrix(ConfiguredBaseModel): + """ + a compressed sparse row matrix + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.sparse", "tree_root": True} + ) + + name: str = Field(...) 
+ shape: Optional[int] = Field(None, description="""the shape of this sparse matrix""") + indices: CSRMatrixIndices = Field(..., description="""column indices""") + indptr: CSRMatrixIndptr = Field(..., description="""index pointer""") + data: CSRMatrixData = Field(..., description="""values in the matrix""") + + +class CSRMatrixIndices(ConfiguredBaseModel): + """ + column indices + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["indices"] = Field( + "indices", + json_schema_extra={ + "linkml_meta": {"equals_string": "indices", "ifabsent": "string(indices)"} + }, + ) + + +class CSRMatrixIndptr(ConfiguredBaseModel): + """ + index pointer + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["indptr"] = Field( + "indptr", + json_schema_extra={ + "linkml_meta": {"equals_string": "indptr", "ifabsent": "string(indptr)"} + }, + ) + + +class CSRMatrixData(ConfiguredBaseModel): + """ + values in the matrix + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +CSRMatrix.model_rebuild() +CSRMatrixIndices.model_rebuild() +CSRMatrixIndptr.model_rebuild() +CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py new file mode 100644 index 0000000..6ded400 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -0,0 +1,449 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +import numpy as np +from ...hdmf_common.v1_2_0.hdmf_common_base import Data, Container +from pandas import DataFrame +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from numpydantic import NDArray, Shape +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator + +metamodel_version = "None" +version = "1.2.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, 
slice], ...]]) -> Any:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            return self._index[item]
+        else:
+            return self.array[item]
+
+    def __setitem__(self, key, value) -> None:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            self._index[key] = value
+        else:
+            self.array[key] = value
+
+
+class VectorIndexMixin(BaseModel):
+    """
+    Mixin class to give VectorIndex indexing abilities
+    """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+    array: Optional[NDArray] = None
+    target: Optional["VectorData"] = None
+
+    def _getitem_helper(self, arg: int):
+        """
+        Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+        """
+
+        start = 0 if arg == 0 else self.array[arg - 1]
+        end = self.array[arg]
+        return self.target.array[slice(start, end)]
+
+    def __getitem__(self, item: Union[int, slice]) -> Any:
+        if self.target is None:
+            return self.array[item]
+        elif type(self.target).__name__ == "VectorData":
+            if isinstance(item, int):
+                return self._getitem_helper(item)
+            else:
+                idx = range(*item.indices(len(self.array)))
+                return [self._getitem_helper(i) for i in idx]
+        else:
+            raise NotImplementedError("DynamicTableRange not supported yet")
+
+    def __setitem__(self, key, value) -> None:
+        if self.target is None:
+            self.array[key] = value
+        else:
+            # a VectorIndex has no `_index` of its own, so writing through the
+            # index into the target's ragged cells is not supported yet
+            raise NotImplementedError("Setting values through a VectorIndex is not yet supported")
+
+
+class DynamicTableMixin(BaseModel):
+    """
+    Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+    Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+    but simplifying along the way :)
+    """
+
+    model_config = ConfigDict(extra="allow")
+    __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]]
+    NON_COLUMN_FIELDS: ClassVar[tuple[str]] = (
+        "name",
+        "colnames",
+        "description",
+    )
+
+    # overridden by subclass but implemented here for testing and typechecking purposes :)
+    colnames: List[str] = Field(default_factory=list)
+
+    @property
+    def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        return {k: getattr(self, k) for i, k in enumerate(self.colnames)}
+
+    @property
+    def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]:
+        return [getattr(self, k) for i, k in enumerate(self.colnames)]
+
+    @overload
+    def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ...
+
+    @overload
+    def __getitem__(self, item: int) -> DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+        DataFrame,
+        list,
+        "NDArray",
+        "VectorData",
+    ]: ...
+
+    @overload
+    def __getitem__(self, item: slice) -> DataFrame: ...
+
+    def __getitem__(
+        self,
+        item: Union[
+            str,
+            int,
+            slice,
+            Tuple[int, Union[int, str]],
+            Tuple[Union[int, slice], ...],
+        ],
+    ) -> Any:
+        """
+        Get an item from the table
+
+        If item is...
+
+        - ``str`` : get the column with this name
+        - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname')
+          gets the 0th row from ``colname``
+        - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+          returns as a :class:`pandas.DataFrame`
+        """
+        if isinstance(item, str):
+            return self._columns[item]
+        if isinstance(item, (int, slice)):
+            return DataFrame.from_dict(self._slice_range(item))
+        elif isinstance(item, tuple):
+            if len(item) != 2:
+                raise ValueError(
+                    "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+                    f" {item}"
+                )
+
+            # all other cases are tuples of (rows, cols)
+            rows, cols = item
+            if isinstance(cols, (int, slice)):
+                cols = self.colnames[cols]
+            data = self._slice_range(rows, cols)
+            return DataFrame.from_dict(data)
+        else:
+            raise ValueError(f"Unsure how to get item with key {item}")
+
+    def _slice_range(
+        self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
+    ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        if cols is None:
+            cols = self.colnames
+        elif isinstance(cols, str):
+            cols = [cols]
+
+        data = {k: self._columns[k][rows] for k in cols}
+        return data
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        raise NotImplementedError("TODO")
+
+    def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+        """
+        Add a column, appending it to ``colnames``
+        """
+        # don't use this while building the model
+        if not getattr(self, "__pydantic_complete__", False):
+            return super().__setattr__(key, value)
+
+        if key not in self.model_fields_set and not key.endswith("_index"):
+            self.colnames.append(key)
+
+        return super().__setattr__(key, value)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_colnames(cls, model: Dict[str, Any]):
+        """
+        Construct colnames from arguments.
+
+        Model dicts preserve insertion order (guaranteed since Python 3.7), so we can
+        use the order the arguments were passed in, minus anything in
+        :attr:`.NON_COLUMN_FIELDS`, as the implied column order
+        """
+        if "colnames" not in model:
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
+            ]
+            model["colnames"] = colnames
+        else:
+            # add any columns not explicitly given an order at the end.
+            # `model["colnames"]` is a list, so check membership in it directly
+            # rather than calling `.keys()` on it
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS
+                and not k.endswith("_index")
+                and k not in model["colnames"]
+            ]
+            model["colnames"].extend(colnames)
+        return model
+
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "DynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._columns.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex):
+                        if field_name == f"{key}_index":
+                            idx = field
+                            break
+                        elif field.target is col:
+                            idx = field
+                            break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
+
+
+linkml_meta = LinkMLMeta(
+    {
+        "annotations": {
+            "is_namespace": {"tag": "is_namespace", "value": False},
+            "namespace": {"tag": "namespace", "value": "hdmf-common"},
+        },
+        "default_prefix": "hdmf-common.table/",
+        "id": "hdmf-common.table",
+        "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+        "name": "hdmf-common.table",
+    }
+)
+
+
+class VectorData(VectorDataMixin):
+    """
+    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table.
VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorIndexMixin): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field( + "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} + ) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) 
+ table: Optional[DynamicTable] = Field( + None, description="""Reference to the DynamicTable object that this region applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what this table region points to.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VocabData(VectorData): + """ + Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + vocabulary: Optional[str] = Field( + None, description="""The available items in the controlled vocabulary.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class DynamicTable(DynamicTableMixin): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +VocabData.model_rebuild() +DynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py new file mode 100644 index 0000000..62d22cb --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py @@ -0,0 +1,83 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_2_0.hdmf_common_sparse import ( + CSRMatrix, + CSRMatrixIndices, + CSRMatrixIndptr, + CSRMatrixData, +) +from ...hdmf_common.v1_2_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + VocabData, + DynamicTable, +) +from ...hdmf_common.v1_2_0.hdmf_common_base import Data, Container + +metamodel_version = "None" +version = "1.2.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common/", + "description": "Common data structures provided by HDMF", + "id": "hdmf-common", + "imports": [ + "hdmf-common.base", + "hdmf-common.table", + "hdmf-common.sparse", + "hdmf-common.nwb.language", + ], + "name": "hdmf-common", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py @@ -0,0 +1 @@ + diff --git 
a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py new file mode 100644 index 0000000..c891ed8 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py @@ -0,0 +1,104 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.2.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.base/", + "id": "hdmf-common.base", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.base", + } +) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + children: Optional[List[Container]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} + ) + name: str = Field(...) 
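Likewise for the table mixin: columns passed as extra fields are collected into `colnames` in passage order by `create_colnames`, and `__getitem__` does the str/int/tuple dispatch documented in its docstring. A minimal sketch against the v1.2.0 generated module, where `MyTable` is a hypothetical subclass for illustration, not part of the schema:

```python
import numpy as np

from nwb_linkml.models.pydantic.hdmf_common.v1_2_0.hdmf_common_table import (
    DynamicTableMixin,
    VectorData,
)

class MyTable(DynamicTableMixin):
    # hypothetical subclass; the real generated DynamicTable models add id etc.
    name: str
    description: str

table = MyTable(
    name="trials",
    description="a toy table",
    start=VectorData(name="start", description="t0", array=np.array([0.0, 1.0, 2.0])),
    stop=VectorData(name="stop", description="t1", array=np.array([1.0, 2.0, 3.0])),
)

table.colnames        # ["start", "stop"], inferred from argument order
table["start"]        # str -> the column itself
table[0:2]            # slice -> a two-row pandas DataFrame
table[0:2, "stop"]    # (rows, col) -> DataFrame of that column's cells
```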
+ + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() +SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py new file mode 100644 index 0000000..9e2e7ce --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py @@ -0,0 +1,126 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_2_1.hdmf_common_base import Container + +metamodel_version = "None" +version = "1.2.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.sparse/", + "id": "hdmf-common.sparse", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.sparse", + } +) + + +class CSRMatrix(Container): + """ + a compressed sparse row matrix + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.sparse", "tree_root": True} + ) + + name: str = Field(...) 
+ shape: Optional[int] = Field(None, description="""the shape of this sparse matrix""") + indices: CSRMatrixIndices = Field(..., description="""column indices""") + indptr: CSRMatrixIndptr = Field(..., description="""index pointer""") + data: CSRMatrixData = Field(..., description="""values in the matrix""") + + +class CSRMatrixIndices(ConfiguredBaseModel): + """ + column indices + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["indices"] = Field( + "indices", + json_schema_extra={ + "linkml_meta": {"equals_string": "indices", "ifabsent": "string(indices)"} + }, + ) + + +class CSRMatrixIndptr(ConfiguredBaseModel): + """ + index pointer + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["indptr"] = Field( + "indptr", + json_schema_extra={ + "linkml_meta": {"equals_string": "indptr", "ifabsent": "string(indptr)"} + }, + ) + + +class CSRMatrixData(ConfiguredBaseModel): + """ + values in the matrix + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +CSRMatrix.model_rebuild() +CSRMatrixIndices.model_rebuild() +CSRMatrixIndptr.model_rebuild() +CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py new file mode 100644 index 0000000..fd4377f --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -0,0 +1,449 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +import numpy as np +from ...hdmf_common.v1_2_1.hdmf_common_base import Data, Container +from pandas import DataFrame +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from numpydantic import NDArray, Shape +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator + +metamodel_version = "None" +version = "1.2.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, 
slice], ...]]) -> Any:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            return self._index[item]
+        else:
+            return self.array[item]
+
+    def __setitem__(self, key, value) -> None:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            self._index[key] = value
+        else:
+            self.array[key] = value
+
+
+class VectorIndexMixin(BaseModel):
+    """
+    Mixin class to give VectorIndex indexing abilities
+    """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+    array: Optional[NDArray] = None
+    target: Optional["VectorData"] = None
+
+    def _getitem_helper(self, arg: int):
+        """
+        Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+        """
+
+        start = 0 if arg == 0 else self.array[arg - 1]
+        end = self.array[arg]
+        return self.target.array[slice(start, end)]
+
+    def __getitem__(self, item: Union[int, slice]) -> Any:
+        if self.target is None:
+            return self.array[item]
+        elif type(self.target).__name__ == "VectorData":
+            if isinstance(item, int):
+                return self._getitem_helper(item)
+            else:
+                idx = range(*item.indices(len(self.array)))
+                return [self._getitem_helper(i) for i in idx]
+        else:
+            raise NotImplementedError("DynamicTableRange not supported yet")
+
+    def __setitem__(self, key, value) -> None:
+        if self.target is None:
+            self.array[key] = value
+        else:
+            # a VectorIndex has no `_index` of its own, so writing through the
+            # index into the target's ragged cells is not supported yet
+            raise NotImplementedError("Setting values through a VectorIndex is not yet supported")
+
+
+class DynamicTableMixin(BaseModel):
+    """
+    Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+    Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+    but simplifying along the way :)
+    """
+
+    model_config = ConfigDict(extra="allow")
+    __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]]
+    NON_COLUMN_FIELDS: ClassVar[tuple[str]] = (
+        "name",
+        "colnames",
+        "description",
+    )
+
+    # overridden by subclass but implemented here for testing and typechecking purposes :)
+    colnames: List[str] = Field(default_factory=list)
+
+    @property
+    def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        return {k: getattr(self, k) for i, k in enumerate(self.colnames)}
+
+    @property
+    def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]:
+        return [getattr(self, k) for i, k in enumerate(self.colnames)]
+
+    @overload
+    def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ...
+
+    @overload
+    def __getitem__(self, item: int) -> DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+        DataFrame,
+        list,
+        "NDArray",
+        "VectorData",
+    ]: ...
+
+    @overload
+    def __getitem__(self, item: slice) -> DataFrame: ...
+
+    def __getitem__(
+        self,
+        item: Union[
+            str,
+            int,
+            slice,
+            Tuple[int, Union[int, str]],
+            Tuple[Union[int, slice], ...],
+        ],
+    ) -> Any:
+        """
+        Get an item from the table
+
+        If item is...
+
+        - ``str`` : get the column with this name
+        - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname')
+          gets the 0th row from ``colname``
+        - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+          returns as a :class:`pandas.DataFrame`
+        """
+        if isinstance(item, str):
+            return self._columns[item]
+        if isinstance(item, (int, slice)):
+            return DataFrame.from_dict(self._slice_range(item))
+        elif isinstance(item, tuple):
+            if len(item) != 2:
+                raise ValueError(
+                    "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+                    f" {item}"
+                )
+
+            # all other cases are tuples of (rows, cols)
+            rows, cols = item
+            if isinstance(cols, (int, slice)):
+                cols = self.colnames[cols]
+            data = self._slice_range(rows, cols)
+            return DataFrame.from_dict(data)
+        else:
+            raise ValueError(f"Unsure how to get item with key {item}")
+
+    def _slice_range(
+        self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
+    ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        if cols is None:
+            cols = self.colnames
+        elif isinstance(cols, str):
+            cols = [cols]
+
+        data = {k: self._columns[k][rows] for k in cols}
+        return data
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        raise NotImplementedError("TODO")
+
+    def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+        """
+        Add a column, appending it to ``colnames``
+        """
+        # don't use this while building the model
+        if not getattr(self, "__pydantic_complete__", False):
+            return super().__setattr__(key, value)
+
+        if key not in self.model_fields_set and not key.endswith("_index"):
+            self.colnames.append(key)
+
+        return super().__setattr__(key, value)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_colnames(cls, model: Dict[str, Any]):
+        """
+        Construct colnames from arguments.
+
+        Model dicts preserve insertion order (guaranteed since Python 3.7), so we can
+        use the order the arguments were passed in, minus anything in
+        :attr:`.NON_COLUMN_FIELDS`, as the implied column order
+        """
+        if "colnames" not in model:
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
+            ]
+            model["colnames"] = colnames
+        else:
+            # add any columns not explicitly given an order at the end.
+            # `model["colnames"]` is a list, so check membership in it directly
+            # rather than calling `.keys()` on it
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS
+                and not k.endswith("_index")
+                and k not in model["colnames"]
+            ]
+            model["colnames"].extend(colnames)
+        return model
+
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "DynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._columns.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex):
+                        if field_name == f"{key}_index":
+                            idx = field
+                            break
+                        elif field.target is col:
+                            idx = field
+                            break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
+
+
+linkml_meta = LinkMLMeta(
+    {
+        "annotations": {
+            "is_namespace": {"tag": "is_namespace", "value": False},
+            "namespace": {"tag": "namespace", "value": "hdmf-common"},
+        },
+        "default_prefix": "hdmf-common.table/",
+        "id": "hdmf-common.table",
+        "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+        "name": "hdmf-common.table",
+    }
+)
+
+
+class VectorData(VectorDataMixin):
+    """
+    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table.
VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorIndexMixin): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field( + "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} + ) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) 
+    table: Optional[DynamicTable] = Field(
+        None, description="""Reference to the DynamicTable object that this region applies to."""
+    )
+    description: Optional[str] = Field(
+        None, description="""Description of what this table region points to."""
+    )
+    array: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class VocabData(VectorData):
+    """
+    Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(...)
+    vocabulary: Optional[str] = Field(
+        None, description="""The available items in the controlled vocabulary."""
+    )
+    description: Optional[str] = Field(
+        None, description="""Description of what these vectors represent."""
+    )
+    array: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+    """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(...)
+    colnames: Optional[str] = Field(
+        None,
+        description="""The names of the columns in this table.
This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +VocabData.model_rebuild() +DynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py new file mode 100644 index 0000000..55f5dc6 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py @@ -0,0 +1,83 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_2_1.hdmf_common_sparse import ( + CSRMatrix, + CSRMatrixIndices, + CSRMatrixIndptr, + CSRMatrixData, +) +from ...hdmf_common.v1_2_1.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_2_1.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + VocabData, + DynamicTable, +) + +metamodel_version = "None" +version = "1.2.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common/", + "description": "Common data structures provided by HDMF", + "id": "hdmf-common", + "imports": [ + "hdmf-common.base", + "hdmf-common.table", + "hdmf-common.sparse", + "hdmf-common.nwb.language", + ], + "name": "hdmf-common", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py @@ -0,0 
+1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py new file mode 100644 index 0000000..63bbcf2 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py @@ -0,0 +1,104 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.base/", + "id": "hdmf-common.base", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.base", + } +) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + children: Optional[List[Container]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} + ) + name: str = Field(...) 
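+
+
+# A minimal usage sketch (illustrative comment only; the names are made up
+# for the example): Data and Container are the abstract roots of the
+# hdmf-common type hierarchy, and SimpleMultiContainer simply groups
+# Containers under a name.
+#
+#     group = SimpleMultiContainer(
+#         name="collection",
+#         children=[Container(name="a"), Container(name="b")],
+#     )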
+ + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() +SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py new file mode 100644 index 0000000..81f3031 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py @@ -0,0 +1,181 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_3_0.hdmf_common_base import Container, Data + +metamodel_version = "None" +version = "1.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.resources/", + "id": "hdmf-common.resources", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.resources", + } +) + + +class ExternalResources(Container): + """ + A set of four tables for tracking external resource references in a file. NOTE: this data type is in beta testing and is subject to change in a later version. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.resources", "tree_root": True} + ) + + name: str = Field(...) + keys: ExternalResourcesKeys = Field( + ..., + description="""A table for storing user terms that are used to refer to external resources.""", + ) + resources: ExternalResourcesResources = Field( + ..., description="""A table for mapping user terms (i.e., keys) to resource entities.""" + ) + objects: ExternalResourcesObjects = Field( + ..., + description="""A table for identifying which objects in a file contain references to external resources.""", + ) + object_keys: ExternalResourcesObjectKeys = Field( + ..., description="""A table for identifying which objects use which keys.""" + ) + + +class ExternalResourcesKeys(Data): + """ + A table for storing user terms that are used to refer to external resources. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"}) + + name: Literal["keys"] = Field( + "keys", + json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, + ) + key_name: str = Field( + ..., + description="""The user term that maps to one or more resources in the 'resources' table.""", + ) + + +class ExternalResourcesResources(Data): + """ + A table for mapping user terms (i.e., keys) to resource entities. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"}) + + name: Literal["resources"] = Field( + "resources", + json_schema_extra={ + "linkml_meta": {"equals_string": "resources", "ifabsent": "string(resources)"} + }, + ) + keytable_idx: np.uint64 = Field( + ..., description="""The index to the key in the 'keys' table.""" + ) + resource_name: str = Field( + ..., + description="""The name of the online resource (e.g., website, database) that has the entity.""", + ) + resource_id: str = Field( + ..., description="""The unique identifier for the resource entity at the resource.""" + ) + uri: str = Field( + ..., + description="""The URI for the resource entity this reference applies to. This can be an empty string.""", + ) + + +class ExternalResourcesObjects(Data): + """ + A table for identifying which objects in a file contain references to external resources. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"}) + + name: Literal["objects"] = Field( + "objects", + json_schema_extra={ + "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} + }, + ) + object_id: str = Field(..., description="""The UUID for the object.""") + field: str = Field( + ..., + description="""The field of the object. This can be an empty string if the object is a dataset and the field is the dataset values.""", + ) + + +class ExternalResourcesObjectKeys(Data): + """ + A table for identifying which objects use which keys. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"}) + + name: Literal["object_keys"] = Field( + "object_keys", + json_schema_extra={ + "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} + }, + ) + objecttable_idx: np.uint64 = Field( + ..., description="""The index to the 'objects' table for the object that holds the key.""" + ) + keytable_idx: np.uint64 = Field( + ..., description="""The index to the 'keys' table for the key.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +ExternalResources.model_rebuild() +ExternalResourcesKeys.model_rebuild() +ExternalResourcesResources.model_rebuild() +ExternalResourcesObjects.model_rebuild() +ExternalResourcesObjectKeys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py new file mode 100644 index 0000000..fe3047c --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py @@ -0,0 +1,110 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_3_0.hdmf_common_base import Container +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.sparse/", + "id": "hdmf-common.sparse", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.sparse", + } +) + + +class CSRMatrix(Container): + """ + A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.sparse", "tree_root": True} + ) + + name: str = Field(...) 
+ shape: Optional[np.uint64] = Field( + None, description="""The shape (number of rows, number of columns) of this sparse matrix.""" + ) + indices: NDArray[Shape["* number_of_non_zero_values"], np.uint64] = Field( + ..., + description="""The column indices.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} + }, + ) + indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], np.uint64] = Field( + ..., + description="""The row index pointer.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} + }, + ) + data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") + + +class CSRMatrixData(ConfiguredBaseModel): + """ + The non-zero values in the matrix. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +CSRMatrix.model_rebuild() +CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py new file mode 100644 index 0000000..7f3f848 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -0,0 +1,449 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +import numpy as np +from ...hdmf_common.v1_3_0.hdmf_common_base import Data, Container +from pandas import DataFrame +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from numpydantic import NDArray, Shape +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator + +metamodel_version = "None" +version = "1.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.array[item] + + def __setitem__(self, key, value) -> None: + if self._index: + # Following hdmf, VectorIndex is the thing that 
knows how to do the slicing
+            self._index[key] = value
+        else:
+            self.array[key] = value
+
+
+class VectorIndexMixin(BaseModel):
+    """
+    Mixin class to give VectorIndex indexing abilities
+    """
+
+    # redefined in `VectorData`, but included here for testing and type checking
+    array: Optional[NDArray] = None
+    target: Optional["VectorData"] = None
+
+    def _getitem_helper(self, arg: int):
+        """
+        Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+        """
+
+        start = 0 if arg == 0 else self.array[arg - 1]
+        end = self.array[arg]
+        return self.target.array[slice(start, end)]
+
+    def __getitem__(self, item: Union[int, slice]) -> Any:
+        if self.target is None:
+            return self.array[item]
+        elif type(self.target).__name__ == "VectorData":
+            if isinstance(item, int):
+                return self._getitem_helper(item)
+            else:
+                idx = range(*item.indices(len(self.array)))
+                return [self._getitem_helper(i) for i in idx]
+        else:
+            raise NotImplementedError("DynamicTableRegion not supported yet")
+
+    def __setitem__(self, key, value) -> None:
+        # a VectorIndex has no index of its own, so write through to its own array
+        self.array[key] = value
+
+
+class DynamicTableMixin(BaseModel):
+    """
+    Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+    Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+    but simplifying along the way :)
+    """
+
+    model_config = ConfigDict(extra="allow")
+    __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+        "name",
+        "colnames",
+        "description",
+    )
+
+    # overridden by subclass but implemented here for testing and typechecking purposes :)
+    colnames: List[str] = Field(default_factory=list)
+
+    @property
+    def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+    @property
+    def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]:
+        return [getattr(self, k) for k in self.colnames]
+
+    @overload
+    def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ...
+
+    @overload
+    def __getitem__(self, item: int) -> DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+        DataFrame,
+        list,
+        "NDArray",
+        "VectorData",
+    ]: ...
+
+    @overload
+    def __getitem__(self, item: slice) -> DataFrame: ...
+
+    def __getitem__(
+        self,
+        item: Union[
+            str,
+            int,
+            slice,
+            Tuple[int, Union[int, str]],
+            Tuple[Union[int, slice], ...],
+        ],
+    ) -> Any:
+        """
+        Get an item from the table
+
+        If item is...
+
+        - ``str`` : get the column with this name
+        - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname')
+          gets the 0th row from ``colname``
+        - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+          returns as a :class:`pandas.DataFrame`
+        """
+        if isinstance(item, str):
+            return self._columns[item]
+        if isinstance(item, (int, slice)):
+            return DataFrame.from_dict(self._slice_range(item))
+        elif isinstance(item, tuple):
+            if len(item) != 2:
+                raise ValueError(
+                    "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+                    f" {item}"
+                )
+
+            # all other cases are tuples of (rows, cols)
+            rows, cols = item
+            if isinstance(cols, (int, slice)):
+                cols = self.colnames[cols]
+            data = self._slice_range(rows, cols)
+            return DataFrame.from_dict(data)
+        else:
+            raise ValueError(f"Unsure how to get item with key {item}")
+
+    def _slice_range(
+        self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
+    ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        if cols is None:
+            cols = self.colnames
+        elif isinstance(cols, str):
+            cols = [cols]
+
+        if isinstance(rows, int):
+            # wrap single-row values in a list so DataFrame.from_dict gets columns
+            data = {k: [self._columns[k][rows]] for k in cols}
+        else:
+            data = {k: self._columns[k][rows] for k in cols}
+        return data
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        raise NotImplementedError("TODO")
+
+    def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+        """
+        Add a column, appending it to ``colnames``
+        """
+        # don't use this while building the model
+        if not getattr(self, "__pydantic_complete__", False):
+            return super().__setattr__(key, value)
+
+        if (
+            key not in self.model_fields_set
+            and not key.endswith("_index")
+            and key not in self.colnames
+        ):
+            self.colnames.append(key)
+
+        return super().__setattr__(key, value)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_colnames(cls, model: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Construct colnames from arguments.
+
+        The model dict preserves insertion order (guaranteed since python 3.7),
+        so we can use that, minus anything in :attr:`.NON_COLUMN_FIELDS`, to
+        infer column order from the order the arguments were passed
+        """
+        if "colnames" not in model:
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
+            ]
+            model["colnames"] = colnames
+        else:
+            # add any columns not explicitly given an order at the end
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS
+                and not k.endswith("_index")
+                and k not in model["colnames"]
+            ]
+            model["colnames"].extend(colnames)
+        return model
+
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "DynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._columns.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex):
+                        if field_name == f"{key}_index":
+                            idx = field
+                            break
+                        elif field.target is col:
+                            idx = field
+                            break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
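+
+
+# A minimal usage sketch (illustrative comment only): any subclass of
+# DynamicTableMixin gets dataframe-style indexing over its column fields.
+# "MyTable" and the plain-list columns are assumptions made up for the
+# example, not part of hdmf-common.
+#
+#     class MyTable(DynamicTableMixin):
+#         name: str = "table"
+#
+#     table = MyTable(volume=[1.0, 2.0, 3.0], trial=["a", "b", "c"])
+#     table.colnames       # ['volume', 'trial'], filled in by create_colnames
+#     table["volume"]      # the raw column: [1.0, 2.0, 3.0]
+#     table[0]             # a one-row pandas DataFrame
+#     table[0:2, "trial"]  # a DataFrame with the first two rows of 'trial'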
+
+
+linkml_meta = LinkMLMeta(
+    {
+        "annotations": {
+            "is_namespace": {"tag": "is_namespace", "value": False},
+            "namespace": {"tag": "namespace", "value": "hdmf-common"},
+        },
+        "default_prefix": "hdmf-common.table/",
+        "id": "hdmf-common.table",
+        "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+        "name": "hdmf-common.table",
+    }
+)
+
+
+class VectorData(VectorDataMixin):
+    """
+    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table.
VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorIndexMixin): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field( + "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} + ) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) 
+    table: Optional[DynamicTable] = Field(
+        None, description="""Reference to the DynamicTable object that this region applies to."""
+    )
+    description: Optional[str] = Field(
+        None, description="""Description of what this table region points to."""
+    )
+    array: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class VocabData(VectorData):
+    """
+    Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(...)
+    vocabulary: Optional[str] = Field(
+        None, description="""The available items in the controlled vocabulary."""
+    )
+    description: Optional[str] = Field(
+        None, description="""Description of what these vectors represent."""
+    )
+    array: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+    """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(...)
+    colnames: Optional[str] = Field(
+        None,
+        description="""The names of the columns in this table.
This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +VocabData.model_rebuild() +DynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py new file mode 100644 index 0000000..a2dcc70 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py @@ -0,0 +1,86 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_3_0.hdmf_common_resources import ( + ExternalResources, + ExternalResourcesKeys, + ExternalResourcesResources, + ExternalResourcesObjects, + ExternalResourcesObjectKeys, +) +from ...hdmf_common.v1_3_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_3_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_3_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + VocabData, + DynamicTable, +) + +metamodel_version = "None" +version = "1.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common/", + "description": "Common data structures provided by HDMF", + "id": "hdmf-common", + "imports": [ + "hdmf-common.base", + "hdmf-common.table", + "hdmf-common.sparse", + "hdmf-common.resources", + "hdmf-common.nwb.language", + ], + "name": "hdmf-common", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py 
b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py new file mode 100644 index 0000000..c26f4f8 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py @@ -0,0 +1,104 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.4.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.base/", + "id": "hdmf-common.base", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.base", + } +) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + children: Optional[List[Container]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} + ) + name: str = Field(...) 
+ + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() +SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py new file mode 100644 index 0000000..83e31dd --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py @@ -0,0 +1,110 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_4_0.hdmf_common_base import Container +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.4.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.sparse/", + "id": "hdmf-common.sparse", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.sparse", + } +) + + +class CSRMatrix(Container): + """ + A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.sparse", "tree_root": True} + ) + + name: str = Field(...) + shape: Optional[np.uint64] = Field( + None, description="""The shape (number of rows, number of columns) of this sparse matrix.""" + ) + indices: NDArray[Shape["* number_of_non_zero_values"], np.uint64] = Field( + ..., + description="""The column indices.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} + }, + ) + indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], np.uint64] = Field( + ..., + description="""The row index pointer.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} + }, + ) + data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") + + +class CSRMatrixData(ConfiguredBaseModel): + """ + The non-zero values in the matrix. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +CSRMatrix.model_rebuild() +CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py new file mode 100644 index 0000000..20c9a63 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -0,0 +1,422 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +import numpy as np +from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container +from pandas import DataFrame +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from numpydantic import NDArray, Shape +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator + +metamodel_version = "None" +version = "1.4.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.array[item] + + def __setitem__(self, key, value) -> None: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.array[key] = value + + +class VectorIndexMixin(BaseModel): + """ + Mixin class to give VectorIndex indexing abilities + """ + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + target: Optional["VectorData"] = None + + def _getitem_helper(self, arg: int): + """ + Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` + """ + + start = 0 if arg == 0 else self.array[arg - 1] + end = self.array[arg] + return self.target.array[slice(start, end)] + + def __getitem__(self, item: Union[int, slice]) -> Any: + if self.target is None: + return self.array[item] + elif type(self.target).__name__ == "VectorData": + if isinstance(item, int): + return 
self._getitem_helper(item)
+            else:
+                idx = range(*item.indices(len(self.array)))
+                return [self._getitem_helper(i) for i in idx]
+        else:
+            raise NotImplementedError("DynamicTableRegion not supported yet")
+
+    def __setitem__(self, key, value) -> None:
+        # a VectorIndex has no index of its own, so write through to its own array
+        self.array[key] = value
+
+
+class DynamicTableMixin(BaseModel):
+    """
+    Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+    Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+    but simplifying along the way :)
+    """
+
+    model_config = ConfigDict(extra="allow")
+    __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+        "name",
+        "colnames",
+        "description",
+    )
+
+    # overridden by subclass but implemented here for testing and typechecking purposes :)
+    colnames: List[str] = Field(default_factory=list)
+
+    @property
+    def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+    @property
+    def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]:
+        return [getattr(self, k) for k in self.colnames]
+
+    @overload
+    def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ...
+
+    @overload
+    def __getitem__(self, item: int) -> DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+        DataFrame,
+        list,
+        "NDArray",
+        "VectorData",
+    ]: ...
+
+    @overload
+    def __getitem__(self, item: slice) -> DataFrame: ...
+
+    def __getitem__(
+        self,
+        item: Union[
+            str,
+            int,
+            slice,
+            Tuple[int, Union[int, str]],
+            Tuple[Union[int, slice], ...],
+        ],
+    ) -> Any:
+        """
+        Get an item from the table
+
+        If item is...
+
+        - ``str`` : get the column with this name
+        - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname')
+          gets the 0th row from ``colname``
+        - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+          returns as a :class:`pandas.DataFrame`
+        """
+        if isinstance(item, str):
+            return self._columns[item]
+        if isinstance(item, (int, slice)):
+            return DataFrame.from_dict(self._slice_range(item))
+        elif isinstance(item, tuple):
+            if len(item) != 2:
+                raise ValueError(
+                    "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+                    f" {item}"
+                )
+
+            # all other cases are tuples of (rows, cols)
+            rows, cols = item
+            if isinstance(cols, (int, slice)):
+                cols = self.colnames[cols]
+            data = self._slice_range(rows, cols)
+            return DataFrame.from_dict(data)
+        else:
+            raise ValueError(f"Unsure how to get item with key {item}")
+
+    def _slice_range(
+        self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
+    ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        if cols is None:
+            cols = self.colnames
+        elif isinstance(cols, str):
+            cols = [cols]
+
+        if isinstance(rows, int):
+            # wrap single-row values in a list so DataFrame.from_dict gets columns
+            data = {k: [self._columns[k][rows]] for k in cols}
+        else:
+            data = {k: self._columns[k][rows] for k in cols}
+        return data
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        raise NotImplementedError("TODO")
+
+    def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+        """
+        Add a column, appending it to ``colnames``
+        """
+        # don't use this while building the model
+        if not getattr(self, "__pydantic_complete__", False):
+            return super().__setattr__(key, value)
+
+        if (
+            key not in self.model_fields_set
+            and not key.endswith("_index")
+            and key not in self.colnames
+        ):
+            self.colnames.append(key)
+
+        return super().__setattr__(key, value)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_colnames(cls, model: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Construct colnames from arguments.
+
+        The model dict preserves insertion order (guaranteed since python 3.7),
+        so we can use that, minus anything in :attr:`.NON_COLUMN_FIELDS`, to
+        infer column order from the order the arguments were passed
+        """
+        if "colnames" not in model:
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
+            ]
+            model["colnames"] = colnames
+        else:
+            # add any columns not explicitly given an order at the end
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS
+                and not k.endswith("_index")
+                and k not in model["colnames"]
+            ]
+            model["colnames"].extend(colnames)
+        return model
+
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "DynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._columns.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex):
+                        if field_name == f"{key}_index":
+                            idx = field
+                            break
+                        elif field.target is col:
+                            idx = field
+                            break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
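+
+
+# A sketch of the implicit index resolution performed by resolve_targets
+# (illustrative comment only; "Ragged", "words", and the toy arrays are
+# assumptions made up for the example). Naming a VectorIndex field
+# "<column>_index" wires it to its column after validation:
+#
+#     import numpy as np
+#
+#     class Ragged(DynamicTableMixin):
+#         name: str = "ragged"
+#         words: VectorData
+#         words_index: VectorIndex
+#
+#     table = Ragged(
+#         words=VectorData(name="words", array=np.array(["a", "b", "c"])),
+#         words_index=VectorIndex(name="words_index", array=np.array([1, 3])),
+#     )
+#     table.words._index is table.words_index  # True: linked by resolve_targets
+#     table.words_index.target is table.words  # True: backlink created
+#     table.words[1]                           # routed through the index -> ['b', 'c']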
+
+
+linkml_meta = LinkMLMeta(
+    {
+        "annotations": {
+            "is_namespace": {"tag": "is_namespace", "value": False},
+            "namespace": {"tag": "namespace", "value": "hdmf-common"},
+        },
+        "default_prefix": "hdmf-common.table/",
+        "id": "hdmf-common.table",
+        "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+        "name": "hdmf-common.table",
+    }
+)
+
+
+class VectorData(VectorDataMixin):
+    """
+    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table.
VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorIndexMixin): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field( + "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} + ) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) 
+    table: Optional[DynamicTable] = Field(
+        None, description="""Reference to the DynamicTable object that this region applies to."""
+    )
+    description: Optional[str] = Field(
+        None, description="""Description of what this table region points to."""
+    )
+    array: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+    """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(...)
+    colnames: Optional[str] = Field(
+        None,
+        description="""The names of the columns in this table.
This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +DynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py new file mode 100644 index 0000000..db59f28 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py @@ -0,0 +1,77 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_4_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, +) + +metamodel_version = "None" +version = "1.4.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common/", + "description": "Common data structures provided by HDMF", + "id": "hdmf-common", + "imports": [ + "hdmf-common.base", + "hdmf-common.table", + "hdmf-common.sparse", + "hdmf-common.nwb.language", + ], + "name": "hdmf-common", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 9e2b445..ba68e69 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -4,10 
+4,11 @@ from decimal import Decimal
 from enum import Enum
 import re
 import sys
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
 import numpy as np
 from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container
+from pandas import DataFrame
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
 from numpydantic import NDArray, Shape
 
 metamodel_version = "None"
@@ -47,6 +48,241 @@ class LinkMLMeta(RootModel):
 
 NUMPYDANTIC_VERSION = "1.2.1"
+
+
+class VectorDataMixin(BaseModel):
+    """
+    Mixin class to give VectorData indexing abilities
+    """
+
+    _index: Optional["VectorIndex"] = None
+
+    # redefined in `VectorData`, but included here for testing and type checking
+    array: Optional[NDArray] = None
+
+    def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            return self._index[item]
+        else:
+            return self.array[item]
+
+    def __setitem__(self, key, value) -> None:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            self._index[key] = value
+        else:
+            self.array[key] = value
+
+
+class VectorIndexMixin(BaseModel):
+    """
+    Mixin class to give VectorIndex indexing abilities
+    """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+    array: Optional[NDArray] = None
+    target: Optional["VectorData"] = None
+
+    def _getitem_helper(self, arg: int):
+        """
+        Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+        """
+
+        start = 0 if arg == 0 else self.array[arg - 1]
+        end = self.array[arg]
+        return self.target.array[slice(start, end)]
+
+    def __getitem__(self, item: Union[int, slice]) -> Any:
+        if self.target is None:
+            return self.array[item]
+        elif type(self.target).__name__ == "VectorData":
+            if isinstance(item, int):
+                return self._getitem_helper(item)
+            else:
+                idx = range(*item.indices(len(self.array)))
+                return [self._getitem_helper(i) for i in idx]
+        else:
+            raise NotImplementedError("DynamicTableRange not supported yet")
+
+    def __setitem__(self, key, value) -> None:
+        if self.target is None:
+            self.array[key] = value
+        else:
+            # a write through a ragged index would have to resolve into target.array
+            raise NotImplementedError("Setting values through a VectorIndex is not supported yet")
+
+
+class DynamicTableMixin(BaseModel):
+    """
+    Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+    Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+    but simplifying along the way :)
+    """
+
+    model_config = ConfigDict(extra="allow")
+    __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+        "name",
+        "colnames",
+        "description",
+    )
+
+    # overridden by subclass but implemented here for testing and typechecking purposes :)
+    colnames: List[str] = Field(default_factory=list)
+
+    @property
+    def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+    @property
+    def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]:
+        return [getattr(self, k) for k in self.colnames]
+
+    @overload
+    def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ...
+
+    @overload
+    def __getitem__(self, item: int) -> DataFrame: ...
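+
+    # Assumed indexing semantics, mirroring hdmf (implemented in __getitem__ below):
+    #   table["col"]     -> the column (list / NDArray / VectorData)
+    #   table[0]         -> the first row, as a pandas DataFrame (per the overloads)
+    #   table[0, "col"]  -> a single cell
+    #   table[0:2, 0:2]  -> a DataFrame of those rows across those columns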
+ + @overload + def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... + + @overload + def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ + DataFrame, + list, + "NDArray", + "VectorData", + ]: ... + + @overload + def __getitem__(self, item: slice) -> DataFrame: ... + + def __getitem__( + self, + item: Union[ + str, + int, + slice, + Tuple[int, Union[int, str]], + Tuple[Union[int, slice], ...], + ], + ) -> Any: + """ + Get an item from the table + + If item is... + + - ``str`` : get the column with this name + - ``int`` : get the row at this index + - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column + - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') + gets the 0th row from ``colname`` + - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. + returns as a :class:`pandas.DataFrame` + """ + if isinstance(item, str): + return self._columns[item] + if isinstance(item, (int, slice)): + return DataFrame.from_dict(self._slice_range(item)) + elif isinstance(item, tuple): + if len(item) != 2: + raise ValueError( + "DynamicTables are 2-dimensional, can't index with more than 2 indices like" + f" {item}" + ) + + # all other cases are tuples of (rows, cols) + rows, cols = item + if isinstance(cols, (int, slice)): + cols = self.colnames[cols] + data = self._slice_range(rows, cols) + return DataFrame.from_dict(data) + else: + raise ValueError(f"Unsure how to get item with key {item}") + + def _slice_range( + self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + if cols is None: + cols = self.colnames + elif isinstance(cols, str): + cols = [cols] + + data = {k: self._columns[k][rows] for k in cols} + return data + + def __setitem__(self, key: str, value: Any) -> None: + raise NotImplementedError("TODO") + + def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): + """ + Add a column, appending it to ``colnames`` + """ + # don't use this while building the model + if not getattr(self, "__pydantic_complete__", False): + return super().__setattr__(key, value) + + if key not in self.model_fields_set and not key.endswith("_index"): + self.colnames.append(key) + + return super().__setattr__(key, value) + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]): + """ + Construct colnames from arguments. 
+
+        The model dict preserves insertion order (Python 3.7+), so we can use it, minus
+        anything in :attr:`.NON_COLUMN_FIELDS`, to infer column order from the order kwargs were passed.
+        """
+        if "colnames" not in model:
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
+            ]
+            model["colnames"] = colnames
+        else:
+            # add any columns not explicitly given an order at the end
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS
+                and not k.endswith("_index")
+                and k not in model["colnames"]
+            ]
+            model["colnames"].extend(colnames)
+        return model
+
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "DynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._columns.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex):
+                        if field_name == f"{key}_index":
+                            idx = field
+                            break
+                        elif field.target is col:
+                            idx = field
+                            break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
+
+
 linkml_meta = LinkMLMeta(
     {
         "annotations": {
             "is_namespace": {"tag": "is_namespace", "value": False},
             "namespace": {"tag": "namespace", "value": "hdmf-common"},
         },
         "default_prefix": "hdmf-common.table/",
         "id": "hdmf-common.table",
         "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
         "name": "hdmf-common.table",
     }
 )
 
 
-class VectorData(Data):
+class VectorData(VectorDataMixin):
     """
     An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
     """
@@ -84,7 +320,7 @@ class VectorData(Data):
     ] = Field(None)
 
 
-class VectorIndex(VectorData):
+class VectorIndex(VectorIndexMixin):
     """
     Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\".
     """
@@ -150,7 +386,7 @@ class DynamicTableRegion(VectorData):
     ] = Field(None)
 
 
-class DynamicTable(Container):
+class DynamicTable(DynamicTableMixin):
     """
     A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns.
Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. """ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py new file mode 100644 index 0000000..ad29fbc --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py @@ -0,0 +1,104 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.5.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.base/", + "id": "hdmf-common.base", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.base", + } +) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + children: Optional[List[Container]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} + ) + name: str = Field(...) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() +SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py new file mode 100644 index 0000000..a095b6b --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py @@ -0,0 +1,110 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_5_1.hdmf_common_base import Container +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.5.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.sparse/", + "id": "hdmf-common.sparse", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.sparse", + } +) + + +class CSRMatrix(Container): + """ + A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.sparse", "tree_root": True} + ) + + name: str = Field(...) 
+ shape: Optional[np.uint64] = Field( + None, description="""The shape (number of rows, number of columns) of this sparse matrix.""" + ) + indices: NDArray[Shape["* number_of_non_zero_values"], np.uint64] = Field( + ..., + description="""The column indices.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} + }, + ) + indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], np.uint64] = Field( + ..., + description="""The row index pointer.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} + }, + ) + data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") + + +class CSRMatrixData(ConfiguredBaseModel): + """ + The non-zero values in the matrix. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +CSRMatrix.model_rebuild() +CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py new file mode 100644 index 0000000..91746ee --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -0,0 +1,453 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +import numpy as np +from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container +from pandas import DataFrame +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.5.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.array[item] + + def __setitem__(self, key, value) -> None: + if self._index: + # Following hdmf, VectorIndex is the thing that 
knows how to do the slicing
+            self._index[key] = value
+        else:
+            self.array[key] = value
+
+
+class VectorIndexMixin(BaseModel):
+    """
+    Mixin class to give VectorIndex indexing abilities
+    """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+    array: Optional[NDArray] = None
+    target: Optional["VectorData"] = None
+
+    def _getitem_helper(self, arg: int):
+        """
+        Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+        """
+
+        start = 0 if arg == 0 else self.array[arg - 1]
+        end = self.array[arg]
+        return self.target.array[slice(start, end)]
+
+    def __getitem__(self, item: Union[int, slice]) -> Any:
+        if self.target is None:
+            return self.array[item]
+        elif type(self.target).__name__ == "VectorData":
+            if isinstance(item, int):
+                return self._getitem_helper(item)
+            else:
+                idx = range(*item.indices(len(self.array)))
+                return [self._getitem_helper(i) for i in idx]
+        else:
+            raise NotImplementedError("DynamicTableRange not supported yet")
+
+    def __setitem__(self, key, value) -> None:
+        if self.target is None:
+            self.array[key] = value
+        else:
+            # a write through a ragged index would have to resolve into target.array
+            raise NotImplementedError("Setting values through a VectorIndex is not supported yet")
+
+
+class DynamicTableMixin(BaseModel):
+    """
+    Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+    Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+    but simplifying along the way :)
+    """
+
+    model_config = ConfigDict(extra="allow")
+    __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+        "name",
+        "colnames",
+        "description",
+    )
+
+    # overridden by subclass but implemented here for testing and typechecking purposes :)
+    colnames: List[str] = Field(default_factory=list)
+
+    @property
+    def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+    @property
+    def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]:
+        return [getattr(self, k) for k in self.colnames]
+
+    @overload
+    def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ...
+
+    @overload
+    def __getitem__(self, item: int) -> DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+        DataFrame,
+        list,
+        "NDArray",
+        "VectorData",
+    ]: ...
+
+    @overload
+    def __getitem__(self, item: slice) -> DataFrame: ...
+
+    def __getitem__(
+        self,
+        item: Union[
+            str,
+            int,
+            slice,
+            Tuple[int, Union[int, str]],
+            Tuple[Union[int, slice], ...],
+        ],
+    ) -> Any:
+        """
+        Get an item from the table
+
+        If item is...
+
+        - ``str`` : get the column with this name
+        - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname')
+          gets the 0th row from ``colname``
+        - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+          returns as a :class:`pandas.DataFrame`
+        """
+        if isinstance(item, str):
+            return self._columns[item]
+        if isinstance(item, (int, slice)):
+            return DataFrame.from_dict(self._slice_range(item))
+        elif isinstance(item, tuple):
+            if len(item) != 2:
+                raise ValueError(
+                    "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+                    f" {item}"
+                )
+
+            # all other cases are tuples of (rows, cols)
+            rows, cols = item
+            if isinstance(cols, (int, slice)):
+                cols = self.colnames[cols]
+            data = self._slice_range(rows, cols)
+            return DataFrame.from_dict(data)
+        else:
+            raise ValueError(f"Unsure how to get item with key {item}")
+
+    def _slice_range(
+        self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
+    ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        if cols is None:
+            cols = self.colnames
+        elif isinstance(cols, str):
+            cols = [cols]
+
+        data = {k: self._columns[k][rows] for k in cols}
+        return data
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        raise NotImplementedError("TODO")
+
+    def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+        """
+        Add a column, appending it to ``colnames``
+        """
+        # don't use this while building the model
+        if not getattr(self, "__pydantic_complete__", False):
+            return super().__setattr__(key, value)
+
+        if key not in self.model_fields_set and not key.endswith("_index"):
+            self.colnames.append(key)
+
+        return super().__setattr__(key, value)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_colnames(cls, model: Dict[str, Any]):
+        """
+        Construct colnames from arguments.
+
+        The model dict preserves insertion order (Python 3.7+), so we can use it, minus
+        anything in :attr:`.NON_COLUMN_FIELDS`, to infer column order from the order kwargs were passed.
+        """
+        if "colnames" not in model:
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
+            ]
+            model["colnames"] = colnames
+        else:
+            # add any columns not explicitly given an order at the end
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS
+                and not k.endswith("_index")
+                and k not in model["colnames"]
+            ]
+            model["colnames"].extend(colnames)
+        return model
+
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "DynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._columns.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex):
+                        if field_name == f"{key}_index":
+                            idx = field
+                            break
+                        elif field.target is col:
+                            idx = field
+                            break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
+
+
+linkml_meta = LinkMLMeta(
+    {
+        "annotations": {
+            "is_namespace": {"tag": "is_namespace", "value": False},
+            "namespace": {"tag": "namespace", "value": "hdmf-common"},
+        },
+        "default_prefix": "hdmf-common.table/",
+        "id": "hdmf-common.table",
+        "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+        "name": "hdmf-common.table",
+    }
+)
+
+
+class VectorData(VectorDataMixin):
+    """
+    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table.
VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorIndexMixin): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field( + "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} + ) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) 
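+    # Worked sketch (values invented): with array=[0, 0, 1, 2] here and a paired
+    # VectorIndex of [2, 4], cell 0 would resolve to target rows [0, 0] and cell 1
+    # to rows [1, 2], following the slicing rule in VectorIndexMixin._getitem_helper.
+    # Note that these mixins do not resolve region targets yet (they raise
+    # NotImplementedError for DynamicTableRegion targets).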
+    table: Optional[DynamicTable] = Field(
+        None, description="""Reference to the DynamicTable object that this region applies to."""
+    )
+    description: Optional[str] = Field(
+        None, description="""Description of what this table region points to."""
+    )
+    array: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+    """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(...)
+    colnames: Optional[str] = Field(
+        None,
+        description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
+    )
+    description: Optional[str] = Field(
+        None, description="""Description of what is in this dynamic table."""
+    )
+    id: NDArray[Shape["* num_rows"], int] = Field(
+        ...,
+        description="""Array of unique identifiers for the rows of this dynamic table.""",
+        json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}},
+    )
+    vector_data: Optional[List[VectorData]] = Field(
+        None, description="""Vector columns, including index columns, of this dynamic table."""
+    )
+
+
+class AlignedDynamicTable(DynamicTable):
+    """
+    DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + children: Optional[List[DynamicTable]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} + ) + name: str = Field(...) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +DynamicTable.model_rebuild() +AlignedDynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py new file mode 100644 index 0000000..836122e --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py @@ -0,0 +1,78 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_5_1.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_5_1.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + +metamodel_version = "None" +version = "1.5.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common/", + "description": "Common data structures provided by HDMF", + "id": "hdmf-common", + "imports": [ + "hdmf-common.base", + "hdmf-common.table", + "hdmf-common.sparse", + "hdmf-common.nwb.language", + ], + "name": "hdmf-common", + } +) + + +# Model rebuild +# see 
https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py new file mode 100644 index 0000000..499e6ad --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py @@ -0,0 +1,104 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.6.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.base/", + "id": "hdmf-common.base", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.base", + } +) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + children: Optional[List[Container]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} + ) + name: str = Field(...) 
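+    # Usage sketch (hypothetical values): a flat grouping of containers, e.g.
+    #   SimpleMultiContainer(name="holder",
+    #                        children=[Container(name="a"), Container(name="b")])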
+ + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() +SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py new file mode 100644 index 0000000..0966f74 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py @@ -0,0 +1,110 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_6_0.hdmf_common_base import Container +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.6.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.sparse/", + "id": "hdmf-common.sparse", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.sparse", + } +) + + +class CSRMatrix(Container): + """ + A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.sparse", "tree_root": True} + ) + + name: str = Field(...) + shape: Optional[np.uint64] = Field( + None, description="""The shape (number of rows, number of columns) of this sparse matrix.""" + ) + indices: NDArray[Shape["* number_of_non_zero_values"], np.uint64] = Field( + ..., + description="""The column indices.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} + }, + ) + indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], np.uint64] = Field( + ..., + description="""The row index pointer.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} + }, + ) + data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") + + +class CSRMatrixData(ConfiguredBaseModel): + """ + The non-zero values in the matrix. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +CSRMatrix.model_rebuild() +CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py new file mode 100644 index 0000000..e0e06bc --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -0,0 +1,453 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +import numpy as np +from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container +from pandas import DataFrame +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.6.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.array[item] + + def __setitem__(self, key, value) -> None: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.array[key] = value + + +class VectorIndexMixin(BaseModel): + """ + Mixin class to give VectorIndex indexing abilities + """ + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + target: Optional["VectorData"] = None + + def _getitem_helper(self, arg: int): + """ + Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` + """ + + start = 0 if arg == 0 else self.array[arg - 1] + end = self.array[arg] + return self.target.array[slice(start, end)] + + def __getitem__(self, item: Union[int, slice]) -> Any: + if self.target is None: + return self.array[item] + elif type(self.target).__name__ == "VectorData": + if isinstance(item, int): + return 
self._getitem_helper(item)
+            else:
+                idx = range(*item.indices(len(self.array)))
+                return [self._getitem_helper(i) for i in idx]
+        else:
+            raise NotImplementedError("DynamicTableRange not supported yet")
+
+    def __setitem__(self, key, value) -> None:
+        if self.target is None:
+            self.array[key] = value
+        else:
+            # a write through a ragged index would have to resolve into target.array
+            raise NotImplementedError("Setting values through a VectorIndex is not supported yet")
+
+
+class DynamicTableMixin(BaseModel):
+    """
+    Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+    Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+    but simplifying along the way :)
+    """
+
+    model_config = ConfigDict(extra="allow")
+    __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+        "name",
+        "colnames",
+        "description",
+    )
+
+    # overridden by subclass but implemented here for testing and typechecking purposes :)
+    colnames: List[str] = Field(default_factory=list)
+
+    @property
+    def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+    @property
+    def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]:
+        return [getattr(self, k) for k in self.colnames]
+
+    @overload
+    def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ...
+
+    @overload
+    def __getitem__(self, item: int) -> DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+        DataFrame,
+        list,
+        "NDArray",
+        "VectorData",
+    ]: ...
+
+    @overload
+    def __getitem__(self, item: slice) -> DataFrame: ...
+
+    def __getitem__(
+        self,
+        item: Union[
+            str,
+            int,
+            slice,
+            Tuple[int, Union[int, str]],
+            Tuple[Union[int, slice], ...],
+        ],
+    ) -> Any:
+        """
+        Get an item from the table
+
+        If item is...
+
+        - ``str`` : get the column with this name
+        - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname')
+          gets the 0th row from ``colname``
+        - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+          returns as a :class:`pandas.DataFrame`
+        """
+        if isinstance(item, str):
+            return self._columns[item]
+        if isinstance(item, (int, slice)):
+            return DataFrame.from_dict(self._slice_range(item))
+        elif isinstance(item, tuple):
+            if len(item) != 2:
+                raise ValueError(
+                    "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+                    f" {item}"
+                )
+
+            # all other cases are tuples of (rows, cols)
+            rows, cols = item
+            if isinstance(cols, (int, slice)):
+                cols = self.colnames[cols]
+            data = self._slice_range(rows, cols)
+            return DataFrame.from_dict(data)
+        else:
+            raise ValueError(f"Unsure how to get item with key {item}")
+
+    def _slice_range(
+        self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
+    ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        if cols is None:
+            cols = self.colnames
+        elif isinstance(cols, str):
+            cols = [cols]
+
+        data = {k: self._columns[k][rows] for k in cols}
+        return data
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        raise NotImplementedError("TODO")
+
+    def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+        """
+        Add a column, appending it to ``colnames``
+        """
+        # don't use this while building the model
+        if not getattr(self, "__pydantic_complete__", False):
+            return super().__setattr__(key, value)
+
+        if key not in self.model_fields_set and not key.endswith("_index"):
+            self.colnames.append(key)
+
+        return super().__setattr__(key, value)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_colnames(cls, model: Dict[str, Any]):
+        """
+        Construct colnames from arguments.
+
+        The model dict preserves insertion order (Python 3.7+), so we can use it, minus
+        anything in :attr:`.NON_COLUMN_FIELDS`, to infer column order from the order kwargs were passed.
+        """
+        if "colnames" not in model:
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index")
+            ]
+            model["colnames"] = colnames
+        else:
+            # add any columns not explicitly given an order at the end
+            colnames = [
+                k
+                for k in model.keys()
+                if k not in cls.NON_COLUMN_FIELDS
+                and not k.endswith("_index")
+                and k not in model["colnames"]
+            ]
+            model["colnames"].extend(colnames)
+        return model
+
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "DynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._columns.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex):
+                        if field_name == f"{key}_index":
+                            idx = field
+                            break
+                        elif field.target is col:
+                            idx = field
+                            break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
+
+
+linkml_meta = LinkMLMeta(
+    {
+        "annotations": {
+            "is_namespace": {"tag": "is_namespace", "value": False},
+            "namespace": {"tag": "namespace", "value": "hdmf-common"},
+        },
+        "default_prefix": "hdmf-common.table/",
+        "id": "hdmf-common.table",
+        "imports": ["hdmf-common.base", "hdmf-common.nwb.language"],
+        "name": "hdmf-common.table",
+    }
+)
+
+
+class VectorData(VectorDataMixin):
+    """
+    An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table.
VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorIndexMixin): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field( + "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} + ) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) 
+ table: Optional[DynamicTable] = Field( + None, description="""Reference to the DynamicTable object that this region applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what this table region points to.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class DynamicTable(DynamicTableMixin): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + children: Optional[List[DynamicTable]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} + ) + name: str = Field(...) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +DynamicTable.model_rebuild() +AlignedDynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py new file mode 100644 index 0000000..1dc832f --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py @@ -0,0 +1,78 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_6_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_6_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + +metamodel_version = "None" +version = "1.6.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common/", + "description": "Common data structures provided by HDMF", + "id": "hdmf-common", + "imports": [ + "hdmf-common.base", + "hdmf-common.table", + "hdmf-common.sparse", + "hdmf-common.nwb.language", + ], + "name": "hdmf-common", + } +) + + +# Model rebuild +# see 
https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py new file mode 100644 index 0000000..751693c --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py @@ -0,0 +1,104 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.base/", + "id": "hdmf-common.base", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.base", + } +) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + children: Optional[List[Container]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} + ) + name: str = Field(...) 
+ + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() +SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py new file mode 100644 index 0000000..c6bfab5 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py @@ -0,0 +1,110 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_7_0.hdmf_common_base import Container +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.sparse/", + "id": "hdmf-common.sparse", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.sparse", + } +) + + +class CSRMatrix(Container): + """ + A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.sparse", "tree_root": True} + ) + + name: str = Field(...) + shape: Optional[np.uint64] = Field( + None, description="""The shape (number of rows, number of columns) of this sparse matrix.""" + ) + indices: NDArray[Shape["* number_of_non_zero_values"], np.uint64] = Field( + ..., + description="""The column indices.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} + }, + ) + indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], np.uint64] = Field( + ..., + description="""The row index pointer.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} + }, + ) + data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") + + +class CSRMatrixData(ConfiguredBaseModel): + """ + The non-zero values in the matrix. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +CSRMatrix.model_rebuild() +CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py new file mode 100644 index 0000000..65a9dd3 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -0,0 +1,453 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +import numpy as np +from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container +from pandas import DataFrame +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.array[item] + + def __setitem__(self, key, value) -> None: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.array[key] = value + + +class VectorIndexMixin(BaseModel): + """ + Mixin class to give VectorIndex indexing abilities + """ + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + target: Optional["VectorData"] = None + + def _getitem_helper(self, arg: int): + """ + Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` + """ + + start = 0 if arg == 0 else self.array[arg - 1] + end = self.array[arg] + return self.target.array[slice(start, end)] + + def __getitem__(self, item: Union[int, slice]) -> Any: + if self.target is None: + return self.array[item] + elif type(self.target).__name__ == "VectorData": + if isinstance(item, int): + return 
self._getitem_helper(item) + else: + idx = range(*item.indices(len(self.array))) + return [self._getitem_helper(i) for i in idx] + else: + raise NotImplementedError("DynamicTableRange not supported yet") + + def __setitem__(self, key, value) -> None: + if self._index: + # VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.array[key] = value + + +class DynamicTableMixin(BaseModel): + """ + Mixin to make DynamicTable subclasses behave like tables/dataframes + + Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable` + but simplifying along the way :) + """ + + model_config = ConfigDict(extra="allow") + __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] + NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "name", + "colnames", + "description", + ) + + # overridden by subclass but implemented here for testing and typechecking purposes :) + colnames: List[str] = Field(default_factory=list) + + @property + def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + return {k: getattr(self, k) for i, k in enumerate(self.colnames)} + + @property + def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: + return [getattr(self, k) for i, k in enumerate(self.colnames)] + + @overload + def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... + + @overload + def __getitem__(self, item: int) -> DataFrame: ... + + @overload + def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... + + @overload + def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ + DataFrame, + list, + "NDArray", + "VectorData", + ]: ... + + @overload + def __getitem__(self, item: slice) -> DataFrame: ... + + def __getitem__( + self, + item: Union[ + str, + int, + slice, + Tuple[int, Union[int, str]], + Tuple[Union[int, slice], ...], + ], + ) -> Any: + """ + Get an item from the table + + If item is... + + - ``str`` : get the column with this name + - ``int`` : get the row at this index + - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column + - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') + gets the 0th row from ``colname`` + - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. 
+ returns as a :class:`pandas.DataFrame` + """ + if isinstance(item, str): + return self._columns[item] + if isinstance(item, (int, slice)): + return DataFrame.from_dict(self._slice_range(item)) + elif isinstance(item, tuple): + if len(item) != 2: + raise ValueError( + "DynamicTables are 2-dimensional, can't index with more than 2 indices like" + f" {item}" + ) + + # all other cases are tuples of (rows, cols) + rows, cols = item + if isinstance(cols, (int, slice)): + cols = self.colnames[cols] + data = self._slice_range(rows, cols) + return DataFrame.from_dict(data) + else: + raise ValueError(f"Unsure how to get item with key {item}") + + def _slice_range( + self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + if cols is None: + cols = self.colnames + elif isinstance(cols, str): + cols = [cols] + + data = {k: self._columns[k][rows] for k in cols} + return data + + def __setitem__(self, key: str, value: Any) -> None: + raise NotImplementedError("TODO") + + def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): + """ + Add a column, appending it to ``colnames`` + """ + # don't use this while building the model + if not getattr(self, "__pydantic_complete__", False): + return super().__setattr__(key, value) + + if key not in self.model_fields_set and not key.endswith("_index"): + self.colnames.append(key) + + return super().__setattr__(key, value) + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]): + """ + Construct colnames from arguments. + + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "colnames" not in model: + colnames = [ + k + for k in model.keys() + if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + ] + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = [ + k + for k in model.keys() + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"].keys() + ] + model["colnames"].extend(colnames) + return model + + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._columns.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex): + if field_name == f"{key}_index": + idx = field + break + elif field.target is col: + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.table/", + "id": "hdmf-common.table", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.table", + } +) + + +class VectorData(VectorDataMixin): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. 
VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorIndexMixin): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field( + "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} + ) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) 
+ table: Optional[DynamicTable] = Field( + None, description="""Reference to the DynamicTable object that this region applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what this table region points to.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class DynamicTable(DynamicTableMixin): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + children: Optional[List[DynamicTable]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} + ) + name: str = Field(...) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +DynamicTable.model_rebuild() +AlignedDynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py new file mode 100644 index 0000000..7d70e39 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py @@ -0,0 +1,78 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_7_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_7_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + +metamodel_version = "None" +version = "1.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common/", + "description": "Common data structures provided by HDMF", + "id": "hdmf-common", + "imports": [ + "hdmf-common.base", + "hdmf-common.table", + "hdmf-common.sparse", + "hdmf-common.nwb.language", + ], + "name": "hdmf-common", + } +) + + +# Model rebuild +# see 
https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index 46ad6ef..986c628 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -4,10 +4,11 @@ from decimal import Decimal from enum import Enum import re import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container +from pandas import DataFrame +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from numpydantic import NDArray, Shape metamodel_version = "None" @@ -47,6 +48,241 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.array[item] + + def __setitem__(self, key, value) -> None: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.array[key] = value + + +class VectorIndexMixin(BaseModel): + """ + Mixin class to give VectorIndex indexing abilities + """ + + # redefined in `VectorData`, but included here for testing and type checking + array: Optional[NDArray] = None + target: Optional["VectorData"] = None + + def _getitem_helper(self, arg: int): + """ + Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` + """ + + start = 0 if arg == 0 else self.array[arg - 1] + end = self.array[arg] + return self.target.array[slice(start, end)] + + def __getitem__(self, item: Union[int, slice]) -> Any: + if self.target is None: + return self.array[item] + elif type(self.target).__name__ == "VectorData": + if isinstance(item, int): + return self._getitem_helper(item) + else: + idx = range(*item.indices(len(self.array))) + return [self._getitem_helper(i) for i in idx] + else: + raise NotImplementedError("DynamicTableRange not supported yet") + + def __setitem__(self, key, value) -> None: + if self._index: + # VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.array[key] = value + + +class DynamicTableMixin(BaseModel): + """ + Mixin to make DynamicTable subclasses behave like tables/dataframes + + Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable` + but simplifying along the way :) + """ + + model_config = ConfigDict(extra="allow") + __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] + NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "name", + "colnames", + "description", + ) + + # overridden by subclass but implemented here for testing and typechecking purposes :) + colnames: List[str] = Field(default_factory=list) + + @property 
+ def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + return {k: getattr(self, k) for i, k in enumerate(self.colnames)} + + @property + def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: + return [getattr(self, k) for i, k in enumerate(self.colnames)] + + @overload + def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... + + @overload + def __getitem__(self, item: int) -> DataFrame: ... + + @overload + def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... + + @overload + def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ + DataFrame, + list, + "NDArray", + "VectorData", + ]: ... + + @overload + def __getitem__(self, item: slice) -> DataFrame: ... + + def __getitem__( + self, + item: Union[ + str, + int, + slice, + Tuple[int, Union[int, str]], + Tuple[Union[int, slice], ...], + ], + ) -> Any: + """ + Get an item from the table + + If item is... + + - ``str`` : get the column with this name + - ``int`` : get the row at this index + - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column + - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') + gets the 0th row from ``colname`` + - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. + returns as a :class:`pandas.DataFrame` + """ + if isinstance(item, str): + return self._columns[item] + if isinstance(item, (int, slice)): + return DataFrame.from_dict(self._slice_range(item)) + elif isinstance(item, tuple): + if len(item) != 2: + raise ValueError( + "DynamicTables are 2-dimensional, can't index with more than 2 indices like" + f" {item}" + ) + + # all other cases are tuples of (rows, cols) + rows, cols = item + if isinstance(cols, (int, slice)): + cols = self.colnames[cols] + data = self._slice_range(rows, cols) + return DataFrame.from_dict(data) + else: + raise ValueError(f"Unsure how to get item with key {item}") + + def _slice_range( + self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + if cols is None: + cols = self.colnames + elif isinstance(cols, str): + cols = [cols] + + data = {k: self._columns[k][rows] for k in cols} + return data + + def __setitem__(self, key: str, value: Any) -> None: + raise NotImplementedError("TODO") + + def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): + """ + Add a column, appending it to ``colnames`` + """ + # don't use this while building the model + if not getattr(self, "__pydantic_complete__", False): + return super().__setattr__(key, value) + + if key not in self.model_fields_set and not key.endswith("_index"): + self.colnames.append(key) + + return super().__setattr__(key, value) + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]): + """ + Construct colnames from arguments. 
+ + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "colnames" not in model: + colnames = [ + k + for k in model.keys() + if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + ] + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = [ + k + for k in model.keys() + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"].keys() + ] + model["colnames"].extend(colnames) + return model + + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._columns.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex): + if field_name == f"{key}_index": + idx = field + break + elif field.target is col: + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + linkml_meta = LinkMLMeta( { "annotations": { @@ -61,7 +297,7 @@ linkml_meta = LinkMLMeta( ) -class VectorData(Data): +class VectorData(VectorDataMixin): """ An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. """ @@ -84,7 +320,7 @@ class VectorData(Data): ] = Field(None) -class VectorIndex(VectorData): +class VectorIndex(VectorIndexMixin): """ Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". """ @@ -150,7 +386,7 @@ class DynamicTableRegion(VectorData): ] = Field(None) -class DynamicTable(Container): +class DynamicTable(DynamicTableMixin): """ A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. 
Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. """ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py index 065f135..0ca8353 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py @@ -7,7 +7,7 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData +from ...hdmf_common.v1_4_0.hdmf_common_table import VectorData from numpydantic import NDArray, Shape metamodel_version = "None" @@ -55,7 +55,7 @@ linkml_meta = LinkMLMeta( }, "default_prefix": "hdmf-experimental.experimental/", "id": "hdmf-experimental.experimental", - "imports": ["../../hdmf_common/v1_5_0/namespace", "hdmf-experimental.nwb.language"], + "imports": ["../../hdmf_common/v1_4_0/namespace", "hdmf-experimental.nwb.language"], "name": "hdmf-experimental.experimental", } ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py index db8a186..affdb23 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py @@ -7,7 +7,7 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data +from ...hdmf_common.v1_4_0.hdmf_common_base import Container, Data metamodel_version = "None" version = "0.1.0" @@ -53,7 +53,7 @@ linkml_meta = LinkMLMeta( }, "default_prefix": "hdmf-experimental.resources/", "id": "hdmf-experimental.resources", - "imports": ["../../hdmf_common/v1_5_0/namespace", "hdmf-experimental.nwb.language"], + "imports": ["../../hdmf_common/v1_4_0/namespace", "hdmf-experimental.nwb.language"], "name": "hdmf-experimental.resources", } ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py index 69ffad1..7ea10f7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py @@ -15,15 +15,14 @@ from ...hdmf_experimental.v0_1_0.hdmf_experimental_resources import ( ExternalResourcesObjects, ExternalResourcesObjectKeys, ) -from ...hdmf_common.v1_5_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData -from 
...hdmf_common.v1_5_0.hdmf_common_base import Data, Container, SimpleMultiContainer -from ...hdmf_common.v1_5_0.hdmf_common_table import ( +from ...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_4_0.hdmf_common_table import ( VectorData, VectorIndex, ElementIdentifiers, DynamicTableRegion, DynamicTable, - AlignedDynamicTable, ) from ...hdmf_experimental.v0_1_0.hdmf_experimental_experimental import EnumData diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py new file mode 100644 index 0000000..7fef3e3 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py @@ -0,0 +1,93 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_5_1.hdmf_common_table import VectorData +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "0.2.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental.experimental/", + "id": "hdmf-experimental.experimental", + "imports": ["../../hdmf_common/v1_5_1/namespace", "hdmf-experimental.nwb.language"], + "name": "hdmf-experimental.experimental", + } +) + + +class EnumData(VectorData): + """ + Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-experimental.experimental", "tree_root": True} + ) + + name: str = Field(...) 
+ elements: Optional[VectorData] = Field( + None, + description="""Reference to the VectorData object that contains the enumerable elements""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +EnumData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py new file mode 100644 index 0000000..7606660 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py @@ -0,0 +1,199 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_5_1.hdmf_common_base import Container, Data + +metamodel_version = "None" +version = "0.2.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental.resources/", + "id": "hdmf-experimental.resources", + "imports": ["../../hdmf_common/v1_5_1/namespace", "hdmf-experimental.nwb.language"], + "name": "hdmf-experimental.resources", + } +) + + +class ExternalResources(Container): + """ + A set of four tables for tracking external resource references in a file. NOTE: this data type is in beta testing and is subject to change in a later version. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-experimental.resources", "tree_root": True} + ) + + name: str = Field(...) 
+ keys: ExternalResourcesKeys = Field( + ..., + description="""A table for storing user terms that are used to refer to external resources.""", + ) + entities: ExternalResourcesEntities = Field( + ..., description="""A table for mapping user terms (i.e., keys) to resource entities.""" + ) + resources: ExternalResourcesResources = Field( + ..., description="""A table for mapping user terms (i.e., keys) to resource entities.""" + ) + objects: ExternalResourcesObjects = Field( + ..., + description="""A table for identifying which objects in a file contain references to external resources.""", + ) + object_keys: ExternalResourcesObjectKeys = Field( + ..., description="""A table for identifying which objects use which keys.""" + ) + + +class ExternalResourcesKeys(Data): + """ + A table for storing user terms that are used to refer to external resources. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["keys"] = Field( + "keys", + json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, + ) + key: str = Field( + ..., + description="""The user term that maps to one or more resources in the 'resources' table.""", + ) + + +class ExternalResourcesEntities(Data): + """ + A table for mapping user terms (i.e., keys) to resource entities. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["entities"] = Field( + "entities", + json_schema_extra={ + "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"} + }, + ) + keys_idx: np.uint64 = Field(..., description="""The index to the key in the 'keys' table.""") + resources_idx: np.uint64 = Field(..., description="""The index into the 'resources' table""") + entity_id: str = Field(..., description="""The unique identifier entity.""") + entity_uri: str = Field( + ..., + description="""The URI for the entity this reference applies to. This can be an empty string.""", + ) + + +class ExternalResourcesResources(Data): + """ + A table for mapping user terms (i.e., keys) to resource entities. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["resources"] = Field( + "resources", + json_schema_extra={ + "linkml_meta": {"equals_string": "resources", "ifabsent": "string(resources)"} + }, + ) + resource: str = Field(..., description="""The name of the resource.""") + resource_uri: str = Field( + ..., description="""The URI for the resource. This can be an empty string.""" + ) + + +class ExternalResourcesObjects(Data): + """ + A table for identifying which objects in a file contain references to external resources. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["objects"] = Field( + "objects", + json_schema_extra={ + "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} + }, + ) + object_id: str = Field(..., description="""The UUID for the object.""") + relative_path: str = Field( + ..., + description="""The relative path from the container with the object_id to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the container is a dataset which contains the value(s) that is associated with an external resource.""", + ) + field: str = Field( + ..., + description="""The field of the compound data type using an external resource. 
This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""", + ) + + +class ExternalResourcesObjectKeys(Data): + """ + A table for identifying which objects use which keys. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["object_keys"] = Field( + "object_keys", + json_schema_extra={ + "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} + }, + ) + objects_idx: np.uint64 = Field( + ..., description="""The index to the 'objects' table for the object that holds the key.""" + ) + keys_idx: np.uint64 = Field(..., description="""The index to the 'keys' table for the key.""") + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +ExternalResources.model_rebuild() +ExternalResourcesKeys.model_rebuild() +ExternalResourcesEntities.model_rebuild() +ExternalResourcesResources.model_rebuild() +ExternalResourcesObjects.model_rebuild() +ExternalResourcesObjectKeys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py new file mode 100644 index 0000000..1345536 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py @@ -0,0 +1,89 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_experimental.v0_2_0.hdmf_experimental_resources import ( + ExternalResources, + ExternalResourcesKeys, + ExternalResourcesEntities, + ExternalResourcesResources, + ExternalResourcesObjects, + ExternalResourcesObjectKeys, +) +from ...hdmf_common.v1_5_1.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_5_1.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) +from ...hdmf_experimental.v0_2_0.hdmf_experimental_experimental import EnumData + +metamodel_version = "None" +version = "0.2.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental/", + "description": ( + "Experimental data structures provided by HDMF. 
These are not " + "guaranteed to be available in the future." + ), + "id": "hdmf-experimental", + "imports": [ + "hdmf-experimental.experimental", + "hdmf-experimental.resources", + "hdmf-experimental.nwb.language", + ], + "name": "hdmf-experimental", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py new file mode 100644 index 0000000..f57439f --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py @@ -0,0 +1,93 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_6_0.hdmf_common_table import VectorData +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "0.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental.experimental/", + "id": "hdmf-experimental.experimental", + "imports": ["../../hdmf_common/v1_6_0/namespace", "hdmf-experimental.nwb.language"], + "name": "hdmf-experimental.experimental", + } +) + + +class EnumData(VectorData): + """ + Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-experimental.experimental", "tree_root": True} + ) + + name: str = Field(...) 
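+    # Hedged sketch, not generated output: values in `array` are integer
+    # codes into `elements`, e.g. array=[0, 2, 1] with elements
+    # ["a", "b", "c"] decodes to ["a", "c", "b"].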
+ elements: Optional[VectorData] = Field( + None, + description="""Reference to the VectorData object that contains the enumerable elements""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +EnumData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py new file mode 100644 index 0000000..600eb18 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py @@ -0,0 +1,207 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_6_0.hdmf_common_base import Container, Data + +metamodel_version = "None" +version = "0.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental.resources/", + "id": "hdmf-experimental.resources", + "imports": ["../../hdmf_common/v1_6_0/namespace", "hdmf-experimental.nwb.language"], + "name": "hdmf-experimental.resources", + } +) + + +class ExternalResources(Container): + """ + A set of five tables for tracking external resource references in a file. NOTE: this data type is experimental and is subject to change in a later version. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-experimental.resources", "tree_root": True} + ) + + name: str = Field(...) 
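+    # Review aid, not generated schema text: relative to the v0.2.0 model
+    # above, this version drops the `resources` sub-table and adds `files`
+    # (keyed by file object id); `entities` correspondingly loses its
+    # `resources_idx` column.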
+ keys: ExternalResourcesKeys = Field( + ..., + description="""A table for storing user terms that are used to refer to external resources.""", + ) + files: ExternalResourcesFiles = Field( + ..., description="""A table for storing object ids of files used in external resources.""" + ) + entities: ExternalResourcesEntities = Field( + ..., description="""A table for mapping user terms (i.e., keys) to resource entities.""" + ) + objects: ExternalResourcesObjects = Field( + ..., + description="""A table for identifying which objects in a file contain references to external resources.""", + ) + object_keys: ExternalResourcesObjectKeys = Field( + ..., description="""A table for identifying which objects use which keys.""" + ) + + +class ExternalResourcesKeys(Data): + """ + A table for storing user terms that are used to refer to external resources. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["keys"] = Field( + "keys", + json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, + ) + key: str = Field( + ..., + description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""", + ) + + +class ExternalResourcesFiles(Data): + """ + A table for storing object ids of files used in external resources. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["files"] = Field( + "files", + json_schema_extra={"linkml_meta": {"equals_string": "files", "ifabsent": "string(files)"}}, + ) + file_object_id: str = Field( + ..., + description="""The object id (UUID) of a file that contains objects that refers to external resources.""", + ) + + +class ExternalResourcesEntities(Data): + """ + A table for mapping user terms (i.e., keys) to resource entities. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["entities"] = Field( + "entities", + json_schema_extra={ + "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"} + }, + ) + keys_idx: np.uint64 = Field( + ..., description="""The row index to the key in the `keys` table.""" + ) + entity_id: str = Field( + ..., + description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""", + ) + entity_uri: str = Field( + ..., + description="""The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""", + ) + + +class ExternalResourcesObjects(Data): + """ + A table for identifying which objects in a file contain references to external resources. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["objects"] = Field( + "objects", + json_schema_extra={ + "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} + }, + ) + files_idx: np.uint64 = Field( + ..., description="""The row index to the file in the `files` table containing the object.""" + ) + object_id: str = Field(..., description="""The object id (UUID) of the object.""") + object_type: str = Field(..., description="""The data type of the object.""") + relative_path: str = Field( + ..., + description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""", + ) + field: str = Field( + ..., + description="""The field within the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""", + ) + + +class ExternalResourcesObjectKeys(Data): + """ + A table for identifying which objects use which keys. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["object_keys"] = Field( + "object_keys", + json_schema_extra={ + "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} + }, + ) + objects_idx: np.uint64 = Field( + ..., description="""The row index to the object in the `objects` table that holds the key""" + ) + keys_idx: np.uint64 = Field( + ..., description="""The row index to the key in the `keys` table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +ExternalResources.model_rebuild() +ExternalResourcesKeys.model_rebuild() +ExternalResourcesFiles.model_rebuild() +ExternalResourcesEntities.model_rebuild() +ExternalResourcesObjects.model_rebuild() +ExternalResourcesObjectKeys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py new file mode 100644 index 0000000..8361004 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py @@ -0,0 +1,89 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_experimental.v0_3_0.hdmf_experimental_resources import ( + ExternalResources, + ExternalResourcesKeys, + ExternalResourcesFiles, + ExternalResourcesEntities, + ExternalResourcesObjects, + ExternalResourcesObjectKeys, +) +from ...hdmf_common.v1_6_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_6_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) +from ...hdmf_experimental.v0_3_0.hdmf_experimental_experimental import EnumData + +metamodel_version = "None" +version = "0.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + 
validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental/", + "description": ( + "Experimental data structures provided by HDMF. These are not " + "guaranteed to be available in the future." + ), + "id": "hdmf-experimental", + "imports": [ + "hdmf-experimental.experimental", + "hdmf-experimental.resources", + "hdmf-experimental.nwb.language", + ], + "name": "hdmf-experimental", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py new file mode 100644 index 0000000..14e4d1a --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py @@ -0,0 +1,93 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_7_0.hdmf_common_table import VectorData +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "0.4.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": 
"namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental.experimental/", + "id": "hdmf-experimental.experimental", + "imports": ["../../hdmf_common/v1_7_0/namespace", "hdmf-experimental.nwb.language"], + "name": "hdmf-experimental.experimental", + } +) + + +class EnumData(VectorData): + """ + Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-experimental.experimental", "tree_root": True} + ) + + name: str = Field(...) + elements: Optional[VectorData] = Field( + None, + description="""Reference to the VectorData object that contains the enumerable elements""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +EnumData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py new file mode 100644 index 0000000..6279463 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py @@ -0,0 +1,229 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_7_0.hdmf_common_base import Container, Data + +metamodel_version = "None" +version = "0.4.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental.resources/", + "id": "hdmf-experimental.resources", + "imports": ["../../hdmf_common/v1_7_0/namespace", "hdmf-experimental.nwb.language"], + "name": "hdmf-experimental.resources", + } +) + + +class ExternalResources(Container): + """ + A set of five tables for tracking external resource references in a file. NOTE: this data type is experimental and is subject to change in a later version. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-experimental.resources", "tree_root": True} + ) + + name: str = Field(...) + keys: ExternalResourcesKeys = Field( + ..., + description="""A table for storing user terms that are used to refer to external resources.""", + ) + files: ExternalResourcesFiles = Field( + ..., description="""A table for storing object ids of files used in external resources.""" + ) + entities: ExternalResourcesEntities = Field( + ..., description="""A table for mapping user terms (i.e., keys) to resource entities.""" + ) + objects: ExternalResourcesObjects = Field( + ..., + description="""A table for identifying which objects in a file contain references to external resources.""", + ) + object_keys: ExternalResourcesObjectKeys = Field( + ..., description="""A table for identifying which objects use which keys.""" + ) + entity_keys: ExternalResourcesEntityKeys = Field( + ..., description="""A table for identifying which keys use which entity.""" + ) + + +class ExternalResourcesKeys(Data): + """ + A table for storing user terms that are used to refer to external resources. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["keys"] = Field( + "keys", + json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, + ) + key: str = Field( + ..., + description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""", + ) + + +class ExternalResourcesFiles(Data): + """ + A table for storing object ids of files used in external resources. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["files"] = Field( + "files", + json_schema_extra={"linkml_meta": {"equals_string": "files", "ifabsent": "string(files)"}}, + ) + file_object_id: str = Field( + ..., + description="""The object id (UUID) of a file that contains objects that refers to external resources.""", + ) + + +class ExternalResourcesEntities(Data): + """ + A table for mapping user terms (i.e., keys) to resource entities. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["entities"] = Field( + "entities", + json_schema_extra={ + "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"} + }, + ) + entity_id: str = Field( + ..., + description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""", + ) + entity_uri: str = Field( + ..., + description="""The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""", + ) + + +class ExternalResourcesObjects(Data): + """ + A table for identifying which objects in a file contain references to external resources. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["objects"] = Field( + "objects", + json_schema_extra={ + "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} + }, + ) + files_idx: np.uint64 = Field( + ..., description="""The row index to the file in the `files` table containing the object.""" + ) + object_id: str = Field(..., description="""The object id (UUID) of the object.""") + object_type: str = Field(..., description="""The data type of the object.""") + relative_path: str = Field( + ..., + description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""", + ) + field: str = Field( + ..., + description="""The field within the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""", + ) + + +class ExternalResourcesObjectKeys(Data): + """ + A table for identifying which objects use which keys. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["object_keys"] = Field( + "object_keys", + json_schema_extra={ + "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} + }, + ) + objects_idx: np.uint64 = Field( + ..., description="""The row index to the object in the `objects` table that holds the key""" + ) + keys_idx: np.uint64 = Field( + ..., description="""The row index to the key in the `keys` table.""" + ) + + +class ExternalResourcesEntityKeys(Data): + """ + A table for identifying which keys use which entity. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["entity_keys"] = Field( + "entity_keys", + json_schema_extra={ + "linkml_meta": {"equals_string": "entity_keys", "ifabsent": "string(entity_keys)"} + }, + ) + entities_idx: np.uint64 = Field( + ..., description="""The row index to the entity in the `entities` table.""" + ) + keys_idx: np.uint64 = Field( + ..., description="""The row index to the key in the `keys` table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +ExternalResources.model_rebuild() +ExternalResourcesKeys.model_rebuild() +ExternalResourcesFiles.model_rebuild() +ExternalResourcesEntities.model_rebuild() +ExternalResourcesObjects.model_rebuild() +ExternalResourcesObjectKeys.model_rebuild() +ExternalResourcesEntityKeys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py new file mode 100644 index 0000000..c642308 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py @@ -0,0 +1,90 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_experimental.v0_4_0.hdmf_experimental_resources import ( + ExternalResources, + ExternalResourcesKeys, + ExternalResourcesFiles, + ExternalResourcesEntities, + ExternalResourcesObjects, + ExternalResourcesObjectKeys, + ExternalResourcesEntityKeys, +) +from ...hdmf_common.v1_7_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_7_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) +from ...hdmf_experimental.v0_4_0.hdmf_experimental_experimental import EnumData + +metamodel_version = "None" +version = "0.4.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental/", + "description": ( + "Experimental data structures provided by HDMF. These are not " + "guaranteed to be available in the future." 
+ ), + "id": "hdmf-experimental", + "imports": [ + "hdmf-experimental.experimental", + "hdmf-experimental.resources", + "hdmf-experimental.nwb.language", + ], + "name": "hdmf-experimental", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml index 68f0304..35776f9 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-common description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml index 68f0304..35776f9 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-common description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml index 68f0304..35776f9 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-common description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml new file mode 100644 index 0000000..ff30beb --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml @@ -0,0 +1,33 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.2.0 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. 
+ attributes: + name: + name: name + range: string + required: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..35776f9 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml @@ -0,0 +1,109 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-common +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + repr: np.float32 + float64: + name: float64 + typeof: double + repr: np.float64 + long: + name: long + typeof: integer + repr: np.longlong + int64: + name: int64 + typeof: integer + repr: np.int64 + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + repr: np.int32 + int16: + name: int16 + typeof: integer + repr: np.int16 + short: + name: short + typeof: integer + repr: np.int16 + int8: + name: int8 + typeof: integer + repr: np.int8 + uint: + name: uint + typeof: integer + repr: np.uint64 + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + repr: np.uint32 + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + repr: np.uint16 + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + repr: np.uint8 + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + repr: np.uint64 + minimum_value: 0 + numeric: + name: numeric + typeof: float + repr: np.number + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime + repr: np.datetime64 +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml new file mode 100644 index 0000000..2d9616c --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml @@ -0,0 +1,75 @@ +name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.2.0 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: a compressed sparse row matrix + attributes: + name: + name: name + range: string + required: true + shape: + name: shape + description: the shape of this sparse matrix + range: int + indices: + name: indices + description: column indices + range: CSRMatrix__indices + required: true + multivalued: false + indptr: + name: indptr + description: index pointer + range: CSRMatrix__indptr + required: true + multivalued: false + data: + name: data + description: values in the matrix + range: CSRMatrix__data + required: true + multivalued: false + tree_root: true + CSRMatrix__indices: + name: CSRMatrix__indices + 
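+    # Editor's note, illustrative only: in CSR form the 2x3 matrix
+    # [[0, 1, 0], [2, 0, 3]] is stored as data=[1, 2, 3], indices=[1, 0, 2],
+    # indptr=[0, 1, 3].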
description: column indices + attributes: + name: + name: name + ifabsent: string(indices) + range: string + required: true + equals_string: indices + CSRMatrix__indptr: + name: CSRMatrix__indptr + description: index pointer + attributes: + name: + name: name + ifabsent: string(indptr) + range: string + required: true + equals_string: indptr + CSRMatrix__data: + name: CSRMatrix__data + description: values in the matrix + attributes: + name: + name: name + ifabsent: string(data) + range: string + required: true + equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml new file mode 100644 index 0000000..accfb99 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml @@ -0,0 +1,181 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.2.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + name: + name: name + range: string + required: true + description: + name: description + description: Description of what these vectors represent. + range: text + array: + name: array + range: AnyType + any_of: + - array: + dimensions: + - alias: dim0 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - alias: dim3 + tree_root: true + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + tree_root: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. + is_a: Data + attributes: + name: + name: name + ifabsent: string(element_id) + range: string + required: true + tree_root: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. 
`DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. They can also + be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create + ragged references, so a single cell of a `DynamicTable` can reference many rows + of another `DynamicTable`. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + table: + name: table + description: Reference to the DynamicTable object that this region applies + to. + range: DynamicTable + description: + name: description + description: Description of what this table region points to. + range: text + tree_root: true + VocabData: + name: VocabData + description: Data that come from a controlled vocabulary of text values. A data + value of i corresponds to the i-th element in the 'vocabulary' array attribute. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + vocabulary: + name: vocabulary + description: The available items in the controlled vocabulary. + range: text + tree_root: true + DynamicTable: + name: DynamicTable + description: A group containing multiple datasets that are aligned on the first + dimension (Currently, this requirement if left up to APIs to check and enforce). + These datasets represent different columns in the table. Apart from a column + that contains unique identifiers for each row, there are no other required datasets. + Users are free to add any number of custom VectorData objects (columns) here. + DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. See documentation for VectorData and VectorIndex + for more details. Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. + is_a: Container + attributes: + name: + name: name + range: string + required: true + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + description: + name: description + description: Description of what is in this dynamic table. + range: text + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + array: + dimensions: + - alias: num_rows + range: int + required: true + multivalued: false + vector_data: + name: vector_data + description: Vector columns, including index columns, of this dynamic table. 
+ range: VectorData + required: false + multivalued: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml new file mode 100644 index 0000000..7befc87 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.2.0 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml new file mode 100644 index 0000000..17f8013 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml @@ -0,0 +1,46 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.2.1 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. + attributes: + name: + name: name + range: string + required: true + tree_root: true + SimpleMultiContainer: + name: SimpleMultiContainer + description: A simple Container for holding onto multiple containers + is_a: Container + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..35776f9 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml @@ -0,0 +1,109 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-common +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + repr: np.float32 + float64: + name: float64 + typeof: double + repr: np.float64 + long: + name: long + typeof: integer + repr: np.longlong + int64: + name: int64 + typeof: integer + repr: np.int64 + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + repr: np.int32 + int16: + name: int16 + typeof: integer + repr: np.int16 + short: + name: short + typeof: integer + repr: np.int16 + int8: + name: int8 + typeof: integer + repr: np.int8 + uint: + name: uint + typeof: integer + repr: np.uint64 + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + repr: np.uint32 + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + repr: 
np.uint16 + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + repr: np.uint8 + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + repr: np.uint64 + minimum_value: 0 + numeric: + name: numeric + typeof: float + repr: np.number + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime + repr: np.datetime64 +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml new file mode 100644 index 0000000..b480dbe --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml @@ -0,0 +1,77 @@ +name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.2.1 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: a compressed sparse row matrix + is_a: Container + attributes: + name: + name: name + range: string + required: true + shape: + name: shape + description: the shape of this sparse matrix + range: int + indices: + name: indices + description: column indices + range: CSRMatrix__indices + required: true + multivalued: false + indptr: + name: indptr + description: index pointer + range: CSRMatrix__indptr + required: true + multivalued: false + data: + name: data + description: values in the matrix + range: CSRMatrix__data + required: true + multivalued: false + tree_root: true + CSRMatrix__indices: + name: CSRMatrix__indices + description: column indices + attributes: + name: + name: name + ifabsent: string(indices) + range: string + required: true + equals_string: indices + CSRMatrix__indptr: + name: CSRMatrix__indptr + description: index pointer + attributes: + name: + name: name + ifabsent: string(indptr) + range: string + required: true + equals_string: indptr + CSRMatrix__data: + name: CSRMatrix__data + description: values in the matrix + attributes: + name: + name: name + ifabsent: string(data) + range: string + required: true + equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml new file mode 100644 index 0000000..4b3b3ef --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml @@ -0,0 +1,181 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.2.1 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. 
This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + name: + name: name + range: string + required: true + description: + name: description + description: Description of what these vectors represent. + range: text + array: + name: array + range: AnyType + any_of: + - array: + dimensions: + - alias: dim0 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - alias: dim3 + tree_root: true + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + tree_root: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. + is_a: Data + attributes: + name: + name: name + ifabsent: string(element_id) + range: string + required: true + tree_root: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. They can also + be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create + ragged references, so a single cell of a `DynamicTable` can reference many rows + of another `DynamicTable`. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + table: + name: table + description: Reference to the DynamicTable object that this region applies + to. + range: DynamicTable + description: + name: description + description: Description of what this table region points to. + range: text + tree_root: true + VocabData: + name: VocabData + description: Data that come from a controlled vocabulary of text values. A data + value of i corresponds to the i-th element in the 'vocabulary' array attribute. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + vocabulary: + name: vocabulary + description: The available items in the controlled vocabulary. + range: text + tree_root: true + DynamicTable: + name: DynamicTable + description: A group containing multiple datasets that are aligned on the first + dimension (Currently, this requirement if left up to APIs to check and enforce). + These datasets represent different columns in the table. Apart from a column + that contains unique identifiers for each row, there are no other required datasets. 
+ Users are free to add any number of custom VectorData objects (columns) here. + DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. See documentation for VectorData and VectorIndex + for more details. Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. + is_a: Container + attributes: + name: + name: name + range: string + required: true + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + description: + name: description + description: Description of what is in this dynamic table. + range: text + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + array: + dimensions: + - alias: num_rows + range: int + required: true + multivalued: false + vector_data: + name: vector_data + description: Vector columns, including index columns, of this dynamic table. + range: VectorData + required: false + multivalued: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml new file mode 100644 index 0000000..e29bfb9 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.2.1 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml new file mode 100644 index 0000000..22efa9c --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml @@ -0,0 +1,46 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.3.0 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. 
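+    # Editor's note: like 1.2.1 above, this version also defines
+    # SimpleMultiContainer (below) as a generic holder of child Containers.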
+ attributes: + name: + name: name + range: string + required: true + tree_root: true + SimpleMultiContainer: + name: SimpleMultiContainer + description: A simple Container for holding onto multiple containers. + is_a: Container + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..35776f9 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml @@ -0,0 +1,109 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-common +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + repr: np.float32 + float64: + name: float64 + typeof: double + repr: np.float64 + long: + name: long + typeof: integer + repr: np.longlong + int64: + name: int64 + typeof: integer + repr: np.int64 + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + repr: np.int32 + int16: + name: int16 + typeof: integer + repr: np.int16 + short: + name: short + typeof: integer + repr: np.int16 + int8: + name: int8 + typeof: integer + repr: np.int8 + uint: + name: uint + typeof: integer + repr: np.uint64 + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + repr: np.uint32 + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + repr: np.uint16 + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + repr: np.uint8 + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + repr: np.uint64 + minimum_value: 0 + numeric: + name: numeric + typeof: float + repr: np.number + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime + repr: np.datetime64 +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml new file mode 100644 index 0000000..918a6a5 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml @@ -0,0 +1,158 @@ +name: hdmf-common.resources +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.resources +version: 1.3.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.resources/ +classes: + ExternalResources: + name: ExternalResources + description: 'A set of four tables for tracking external resource references in + a file. NOTE: this data type is in beta testing and is subject to change in + a later version.' 
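+ # [editor's note -- an illustrative summary, not emitted by the schema generator:
+ # the four tables join as follows. 'objects' lists the in-file objects that hold
+ # external references; 'object_keys' pairs a row of 'objects' with a row of
+ # 'keys'; and 'resources' maps a row of 'keys' (via keytable_idx) to an external
+ # entity, i.e. a resource_name, resource_id, and uri.]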
+ is_a: Container + attributes: + name: + name: name + range: string + required: true + keys: + name: keys + description: A table for storing user terms that are used to refer to external + resources. + range: ExternalResources__keys + required: true + multivalued: false + resources: + name: resources + description: A table for mapping user terms (i.e., keys) to resource entities. + range: ExternalResources__resources + required: true + multivalued: false + objects: + name: objects + description: A table for identifying which objects in a file contain references + to external resources. + range: ExternalResources__objects + required: true + multivalued: false + object_keys: + name: object_keys + description: A table for identifying which objects use which keys. + range: ExternalResources__object_keys + required: true + multivalued: false + tree_root: true + ExternalResources__keys: + name: ExternalResources__keys + description: A table for storing user terms that are used to refer to external + resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(keys) + range: string + required: true + equals_string: keys + key_name: + name: key_name + description: The user term that maps to one or more resources in the 'resources' + table. + range: text + required: true + multivalued: false + ExternalResources__resources: + name: ExternalResources__resources + description: A table for mapping user terms (i.e., keys) to resource entities. + is_a: Data + attributes: + name: + name: name + ifabsent: string(resources) + range: string + required: true + equals_string: resources + keytable_idx: + name: keytable_idx + description: The index to the key in the 'keys' table. + range: uint + required: true + multivalued: false + resource_name: + name: resource_name + description: The name of the online resource (e.g., website, database) that + has the entity. + range: text + required: true + multivalued: false + resource_id: + name: resource_id + description: The unique identifier for the resource entity at the resource. + range: text + required: true + multivalued: false + uri: + name: uri + description: The URI for the resource entity this reference applies to. This + can be an empty string. + range: text + required: true + multivalued: false + ExternalResources__objects: + name: ExternalResources__objects + description: A table for identifying which objects in a file contain references + to external resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(objects) + range: string + required: true + equals_string: objects + object_id: + name: object_id + description: The UUID for the object. + range: text + required: true + multivalued: false + field: + name: field + description: The field of the object. This can be an empty string if the object + is a dataset and the field is the dataset values. + range: text + required: true + multivalued: false + ExternalResources__object_keys: + name: ExternalResources__object_keys + description: A table for identifying which objects use which keys. + is_a: Data + attributes: + name: + name: name + ifabsent: string(object_keys) + range: string + required: true + equals_string: object_keys + objecttable_idx: + name: objecttable_idx + description: The index to the 'objects' table for the object that holds the + key. + range: uint + required: true + multivalued: false + keytable_idx: + name: keytable_idx + description: The index to the 'keys' table for the key. 
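+ # [editor's note -- hypothetical example for illustration only: if row 0 of
+ # 'keys' stores the user term 'mouse', a row here with keytable_idx 0 maps
+ # 'mouse' to an entity in an external resource, e.g. a taxonomy id and its uri.]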
+ range: uint + required: true + multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml new file mode 100644 index 0000000..264e15d --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml @@ -0,0 +1,66 @@ +name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.3.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: A compressed sparse row matrix. Data are stored in the standard CSR + format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] + and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + is_a: Container + attributes: + name: + name: name + range: string + required: true + shape: + name: shape + description: The shape (number of rows, number of columns) of this sparse + matrix. + range: uint + indices: + name: indices + description: The column indices. + array: + dimensions: + - alias: number_of_non_zero_values + range: uint + required: true + multivalued: false + indptr: + name: indptr + description: The row index pointer. + array: + dimensions: + - alias: number_of_rows_in_the_matrix_1 + range: uint + required: true + multivalued: false + data: + name: data + description: The non-zero values in the matrix. + range: CSRMatrix__data + required: true + multivalued: false + tree_root: true + CSRMatrix__data: + name: CSRMatrix__data + description: The non-zero values in the matrix. + attributes: + name: + name: name + ifabsent: string(data) + range: string + required: true + equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml new file mode 100644 index 0000000..478c18d --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml @@ -0,0 +1,181 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.3.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + name: + name: name + range: string + required: true + description: + name: description + description: Description of what these vectors represent. 
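+ # [editor's note -- worked example of the ragged layout described above, not
+ # generator output: if this VectorData stores [a, b, c, d, e] and its paired
+ # VectorIndex stores [2, 5], then row 0 of the table is VectorData[0:2] == [a, b]
+ # and row 1 is VectorData[2:5] == [c, d, e].]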
+ range: text + array: + name: array + range: AnyType + any_of: + - array: + dimensions: + - alias: dim0 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - alias: dim3 + tree_root: true + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + tree_root: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. + is_a: Data + attributes: + name: + name: name + ifabsent: string(element_id) + range: string + required: true + tree_root: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. They can also + be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create + ragged references, so a single cell of a `DynamicTable` can reference many rows + of another `DynamicTable`. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + table: + name: table + description: Reference to the DynamicTable object that this region applies + to. + range: DynamicTable + description: + name: description + description: Description of what this table region points to. + range: text + tree_root: true + VocabData: + name: VocabData + description: Data that come from a controlled vocabulary of text values. A data + value of i corresponds to the i-th element in the 'vocabulary' array attribute. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + vocabulary: + name: vocabulary + description: The available items in the controlled vocabulary. + range: text + tree_root: true + DynamicTable: + name: DynamicTable + description: A group containing multiple datasets that are aligned on the first + dimension (Currently, this requirement is left up to APIs to check and enforce). + These datasets represent different columns in the table. Apart from a column + that contains unique identifiers for each row, there are no other required datasets. + Users are free to add any number of custom VectorData objects (columns) here. + DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. See documentation for VectorData and VectorIndex + for more details.
Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. + is_a: Container + attributes: + name: + name: name + range: string + required: true + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + description: + name: description + description: Description of what is in this dynamic table. + range: text + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + array: + dimensions: + - alias: num_rows + range: int + required: true + multivalued: false + vector_data: + name: vector_data + description: Vector columns, including index columns, of this dynamic table. + range: VectorData + required: false + multivalued: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml new file mode 100644 index 0000000..11885e7 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml @@ -0,0 +1,18 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.3.0 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.resources +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml new file mode 100644 index 0000000..ef51ebd --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml @@ -0,0 +1,46 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.4.0 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. + attributes: + name: + name: name + range: string + required: true + tree_root: true + SimpleMultiContainer: + name: SimpleMultiContainer + description: A simple Container for holding onto multiple containers. 
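+ # [editor's note -- an illustrative reading of the slot below, offered as an
+ # aside: 'children' may hold any mix of Container subclasses, and with
+ # inlined: true and inlined_as_list: false they are serialized inline as a
+ # mapping rather than as a list.]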
+ is_a: Container + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..98c00e4 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml @@ -0,0 +1,109 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-experimental +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + repr: np.float32 + float64: + name: float64 + typeof: double + repr: np.float64 + long: + name: long + typeof: integer + repr: np.longlong + int64: + name: int64 + typeof: integer + repr: np.int64 + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + repr: np.int32 + int16: + name: int16 + typeof: integer + repr: np.int16 + short: + name: short + typeof: integer + repr: np.int16 + int8: + name: int8 + typeof: integer + repr: np.int8 + uint: + name: uint + typeof: integer + repr: np.uint64 + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + repr: np.uint32 + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + repr: np.uint16 + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + repr: np.uint8 + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + repr: np.uint64 + minimum_value: 0 + numeric: + name: numeric + typeof: float + repr: np.number + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime + repr: np.datetime64 +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml new file mode 100644 index 0000000..a004e10 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml @@ -0,0 +1,66 @@ +name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.4.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: A compressed sparse row matrix. Data are stored in the standard CSR + format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] + and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + is_a: Container + attributes: + name: + name: name + range: string + required: true + shape: + name: shape + description: The shape (number of rows, number of columns) of this sparse + matrix. + range: uint + indices: + name: indices + description: The column indices. 
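+ # [editor's note -- worked example of the CSR layout described above, not
+ # generator output: for shape (2, 3) with data [10, 20, 30], indices [0, 2, 1],
+ # and indptr [0, 2, 3], row 0 holds 10 at column 0 and 20 at column 2
+ # (indices[0:2]), and row 1 holds 30 at column 1 (indices[2:3]).]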
+ array: + dimensions: + - alias: number_of_non_zero_values + range: uint + required: true + multivalued: false + indptr: + name: indptr + description: The row index pointer. + array: + dimensions: + - alias: number_of_rows_in_the_matrix_1 + range: uint + required: true + multivalued: false + data: + name: data + description: The non-zero values in the matrix. + range: CSRMatrix__data + required: true + multivalued: false + tree_root: true + CSRMatrix__data: + name: CSRMatrix__data + description: The non-zero values in the matrix. + attributes: + name: + name: name + ifabsent: string(data) + range: string + required: true + equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml new file mode 100644 index 0000000..ca9ead2 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml @@ -0,0 +1,166 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.4.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + name: + name: name + range: string + required: true + description: + name: description + description: Description of what these vectors represent. + range: text + array: + name: array + range: AnyType + any_of: + - array: + dimensions: + - alias: dim0 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - alias: dim3 + tree_root: true + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + tree_root: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. + is_a: Data + attributes: + name: + name: name + ifabsent: string(element_id) + range: string + required: true + tree_root: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. 
The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. They can also + be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create + ragged references, so a single cell of a `DynamicTable` can reference many rows + of another `DynamicTable`. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + table: + name: table + description: Reference to the DynamicTable object that this region applies + to. + range: DynamicTable + description: + name: description + description: Description of what this table region points to. + range: text + tree_root: true + DynamicTable: + name: DynamicTable + description: A group containing multiple datasets that are aligned on the first + dimension (Currently, this requirement is left up to APIs to check and enforce). + These datasets represent different columns in the table. Apart from a column + that contains unique identifiers for each row, there are no other required datasets. + Users are free to add any number of custom VectorData objects (columns) here. + DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. See documentation for VectorData and VectorIndex + for more details. Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. + is_a: Container + attributes: + name: + name: name + range: string + required: true + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + description: + name: description + description: Description of what is in this dynamic table. + range: text + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + array: + dimensions: + - alias: num_rows + range: int + required: true + multivalued: false + vector_data: + name: vector_data + description: Vector columns, including index columns, of this dynamic table.
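+ # [editor's note -- illustrative aside, not generator output: every column,
+ # ragged ones included, is aligned with 'id' on the row dimension. E.g. id
+ # [0, 1, 2] with a flat column of length 3 and a ragged column whose index has
+ # three entries still describes exactly three rows.]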
+ range: VectorData + required: false + multivalued: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml new file mode 100644 index 0000000..50680da --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.4.0 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml index 68f0304..98c00e4 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-experimental description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml new file mode 100644 index 0000000..75e5a6c --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml @@ -0,0 +1,46 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.5.1 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. + attributes: + name: + name: name + range: string + required: true + tree_root: true + SimpleMultiContainer: + name: SimpleMultiContainer + description: A simple Container for holding onto multiple containers. 
+ is_a: Container + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..98c00e4 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml @@ -0,0 +1,109 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-experimental +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + repr: np.float32 + float64: + name: float64 + typeof: double + repr: np.float64 + long: + name: long + typeof: integer + repr: np.longlong + int64: + name: int64 + typeof: integer + repr: np.int64 + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + repr: np.int32 + int16: + name: int16 + typeof: integer + repr: np.int16 + short: + name: short + typeof: integer + repr: np.int16 + int8: + name: int8 + typeof: integer + repr: np.int8 + uint: + name: uint + typeof: integer + repr: np.uint64 + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + repr: np.uint32 + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + repr: np.uint16 + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + repr: np.uint8 + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + repr: np.uint64 + minimum_value: 0 + numeric: + name: numeric + typeof: float + repr: np.number + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime + repr: np.datetime64 +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml new file mode 100644 index 0000000..380bc7e --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml @@ -0,0 +1,66 @@ +name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.5.1 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: A compressed sparse row matrix. Data are stored in the standard CSR + format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] + and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + is_a: Container + attributes: + name: + name: name + range: string + required: true + shape: + name: shape + description: The shape (number of rows, number of columns) of this sparse + matrix. + range: uint + indices: + name: indices + description: The column indices. 
+ array: + dimensions: + - alias: number_of_non_zero_values + range: uint + required: true + multivalued: false + indptr: + name: indptr + description: The row index pointer. + array: + dimensions: + - alias: number_of_rows_in_the_matrix_1 + range: uint + required: true + multivalued: false + data: + name: data + description: The non-zero values in the matrix. + range: CSRMatrix__data + required: true + multivalued: false + tree_root: true + CSRMatrix__data: + name: CSRMatrix__data + description: The non-zero values in the matrix. + attributes: + name: + name: name + ifabsent: string(data) + range: string + required: true + equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml new file mode 100644 index 0000000..557721d --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml @@ -0,0 +1,185 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.5.1 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + name: + name: name + range: string + required: true + description: + name: description + description: Description of what these vectors represent. + range: text + array: + name: array + range: AnyType + any_of: + - array: + dimensions: + - alias: dim0 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - alias: dim3 + tree_root: true + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + tree_root: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. + is_a: Data + attributes: + name: + name: name + ifabsent: string(element_id) + range: string + required: true + tree_root: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. 
The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. They can also + be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create + ragged references, so a single cell of a `DynamicTable` can reference many rows + of another `DynamicTable`. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + table: + name: table + description: Reference to the DynamicTable object that this region applies + to. + range: DynamicTable + description: + name: description + description: Description of what this table region points to. + range: text + tree_root: true + DynamicTable: + name: DynamicTable + description: A group containing multiple datasets that are aligned on the first + dimension (Currently, this requirement is left up to APIs to check and enforce). + These datasets represent different columns in the table. Apart from a column + that contains unique identifiers for each row, there are no other required datasets. + Users are free to add any number of custom VectorData objects (columns) here. + DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. See documentation for VectorData and VectorIndex + for more details. Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. + is_a: Container + attributes: + name: + name: name + range: string + required: true + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + description: + name: description + description: Description of what is in this dynamic table. + range: text + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + array: + dimensions: + - alias: num_rows + range: int + required: true + multivalued: false + vector_data: + name: vector_data + description: Vector columns, including index columns, of this dynamic table. + range: VectorData + required: false + multivalued: true + tree_root: true + AlignedDynamicTable: + name: AlignedDynamicTable + description: DynamicTable container that supports storing a collection of sub-tables. + Each sub-table is a DynamicTable itself that is aligned with the main table + by row index. I.e., all DynamicTables stored in this group MUST have the same + number of rows.
This type effectively defines a 2-level table in which the main + data is stored in the main table implemented by this type and additional columns + of the table are grouped into categories, with each category being represented + by a separate DynamicTable stored within the group. + is_a: DynamicTable + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: DynamicTable + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml new file mode 100644 index 0000000..917870d --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.5.1 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml new file mode 100644 index 0000000..7031d84 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml @@ -0,0 +1,46 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.6.0 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. + attributes: + name: + name: name + range: string + required: true + tree_root: true + SimpleMultiContainer: + name: SimpleMultiContainer + description: A simple Container for holding onto multiple containers. 
+ is_a: Container + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..98c00e4 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml @@ -0,0 +1,109 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-experimental +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + repr: np.float32 + float64: + name: float64 + typeof: double + repr: np.float64 + long: + name: long + typeof: integer + repr: np.longlong + int64: + name: int64 + typeof: integer + repr: np.int64 + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + repr: np.int32 + int16: + name: int16 + typeof: integer + repr: np.int16 + short: + name: short + typeof: integer + repr: np.int16 + int8: + name: int8 + typeof: integer + repr: np.int8 + uint: + name: uint + typeof: integer + repr: np.uint64 + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + repr: np.uint32 + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + repr: np.uint16 + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + repr: np.uint8 + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + repr: np.uint64 + minimum_value: 0 + numeric: + name: numeric + typeof: float + repr: np.number + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime + repr: np.datetime64 +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml new file mode 100644 index 0000000..e2e8cff --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml @@ -0,0 +1,66 @@ +name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.6.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: A compressed sparse row matrix. Data are stored in the standard CSR + format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] + and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + is_a: Container + attributes: + name: + name: name + range: string + required: true + shape: + name: shape + description: The shape (number of rows, number of columns) of this sparse + matrix. + range: uint + indices: + name: indices + description: The column indices. 
+ array: + dimensions: + - alias: number_of_non_zero_values + range: uint + required: true + multivalued: false + indptr: + name: indptr + description: The row index pointer. + array: + dimensions: + - alias: number_of_rows_in_the_matrix_1 + range: uint + required: true + multivalued: false + data: + name: data + description: The non-zero values in the matrix. + range: CSRMatrix__data + required: true + multivalued: false + tree_root: true + CSRMatrix__data: + name: CSRMatrix__data + description: The non-zero values in the matrix. + attributes: + name: + name: name + ifabsent: string(data) + range: string + required: true + equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml new file mode 100644 index 0000000..100ea47 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml @@ -0,0 +1,185 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.6.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + name: + name: name + range: string + required: true + description: + name: description + description: Description of what these vectors represent. + range: text + array: + name: array + range: AnyType + any_of: + - array: + dimensions: + - alias: dim0 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - alias: dim3 + tree_root: true + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + tree_root: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. + is_a: Data + attributes: + name: + name: name + ifabsent: string(element_id) + range: string + required: true + tree_root: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. 
The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. They can also + be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create + ragged references, so a single cell of a `DynamicTable` can reference many rows + of another `DynamicTable`. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + table: + name: table + description: Reference to the DynamicTable object that this region applies + to. + range: DynamicTable + description: + name: description + description: Description of what this table region points to. + range: text + tree_root: true + DynamicTable: + name: DynamicTable + description: A group containing multiple datasets that are aligned on the first + dimension (Currently, this requirement is left up to APIs to check and enforce). + These datasets represent different columns in the table. Apart from a column + that contains unique identifiers for each row, there are no other required datasets. + Users are free to add any number of custom VectorData objects (columns) here. + DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. See documentation for VectorData and VectorIndex + for more details. Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. + is_a: Container + attributes: + name: + name: name + range: string + required: true + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + description: + name: description + description: Description of what is in this dynamic table. + range: text + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + array: + dimensions: + - alias: num_rows + range: int + required: true + multivalued: false + vector_data: + name: vector_data + description: Vector columns, including index columns, of this dynamic table. + range: VectorData + required: false + multivalued: true + tree_root: true + AlignedDynamicTable: + name: AlignedDynamicTable + description: DynamicTable container that supports storing a collection of sub-tables. + Each sub-table is a DynamicTable itself that is aligned with the main table + by row index. I.e., all DynamicTables stored in this group MUST have the same + number of rows.
This type effectively defines a 2-level table in which the main + data is stored in the main table implemented by this type and additional columns + of the table are grouped into categories, with each category being represented + by a separate DynamicTable stored within the group. + is_a: DynamicTable + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: DynamicTable + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml new file mode 100644 index 0000000..241b849 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.6.0 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml new file mode 100644 index 0000000..acf8a08 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml @@ -0,0 +1,46 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.7.0 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. + attributes: + name: + name: name + range: string + required: true + tree_root: true + SimpleMultiContainer: + name: SimpleMultiContainer + description: A simple Container for holding onto multiple containers. 
+ is_a: Container + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..98c00e4 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml @@ -0,0 +1,109 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-experimental +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + repr: np.float32 + float64: + name: float64 + typeof: double + repr: np.float64 + long: + name: long + typeof: integer + repr: np.longlong + int64: + name: int64 + typeof: integer + repr: np.int64 + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + repr: np.int32 + int16: + name: int16 + typeof: integer + repr: np.int16 + short: + name: short + typeof: integer + repr: np.int16 + int8: + name: int8 + typeof: integer + repr: np.int8 + uint: + name: uint + typeof: integer + repr: np.uint64 + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + repr: np.uint32 + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + repr: np.uint16 + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + repr: np.uint8 + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + repr: np.uint64 + minimum_value: 0 + numeric: + name: numeric + typeof: float + repr: np.number + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime + repr: np.datetime64 +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml new file mode 100644 index 0000000..e258d51 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml @@ -0,0 +1,66 @@ +name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.7.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: A compressed sparse row matrix. Data are stored in the standard CSR + format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] + and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + is_a: Container + attributes: + name: + name: name + range: string + required: true + shape: + name: shape + description: The shape (number of rows, number of columns) of this sparse + matrix. + range: uint + indices: + name: indices + description: The column indices. 
+ array: + dimensions: + - alias: number_of_non_zero_values + range: uint + required: true + multivalued: false + indptr: + name: indptr + description: The row index pointer. + array: + dimensions: + - alias: number_of_rows_in_the_matrix_1 + range: uint + required: true + multivalued: false + data: + name: data + description: The non-zero values in the matrix. + range: CSRMatrix__data + required: true + multivalued: false + tree_root: true + CSRMatrix__data: + name: CSRMatrix__data + description: The non-zero values in the matrix. + attributes: + name: + name: name + ifabsent: string(data) + range: string + required: true + equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml new file mode 100644 index 0000000..3dba25e --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml @@ -0,0 +1,185 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.7.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + name: + name: name + range: string + required: true + description: + name: description + description: Description of what these vectors represent. + range: text + array: + name: array + range: AnyType + any_of: + - array: + dimensions: + - alias: dim0 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - alias: dim3 + tree_root: true + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + tree_root: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. + is_a: Data + attributes: + name: + name: name + ifabsent: string(element_id) + range: string + required: true + tree_root: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. 
The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. They can also + be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create + ragged references, so a single cell of a `DynamicTable` can reference many rows + of another `DynamicTable`. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + table: + name: table + description: Reference to the DynamicTable object that this region applies + to. + range: DynamicTable + description: + name: description + description: Description of what this table region points to. + range: text + tree_root: true + DynamicTable: + name: DynamicTable + description: A group containing multiple datasets that are aligned on the first + dimension (Currently, this requirement is left up to APIs to check and enforce). + These datasets represent different columns in the table. Apart from a column + that contains unique identifiers for each row, there are no other required datasets. + Users are free to add any number of custom VectorData objects (columns) here. + DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. See documentation for VectorData and VectorIndex + for more details. Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. + is_a: Container + attributes: + name: + name: name + range: string + required: true + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + description: + name: description + description: Description of what is in this dynamic table. + range: text + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + array: + dimensions: + - alias: num_rows + range: int + required: true + multivalued: false + vector_data: + name: vector_data + description: Vector columns, including index columns, of this dynamic table. + range: VectorData + required: false + multivalued: true + tree_root: true + AlignedDynamicTable: + name: AlignedDynamicTable + description: DynamicTable container that supports storing a collection of sub-tables. + Each sub-table is a DynamicTable itself that is aligned with the main table + by row index. I.e., all DynamicTables stored in this group MUST have the same + number of rows.
This type effectively defines a 2-level table in which the main + data is stored in the main table implemented by this type and additional columns + of the table are grouped into categories, with each category being represented + by a separate DynamicTable stored within the group. + is_a: DynamicTable + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: DynamicTable + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml new file mode 100644 index 0000000..b689554 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.7.0 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml index 68f0304..98c00e4 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-experimental description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml index c14e264..6b54542 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml @@ -9,7 +9,7 @@ annotations: id: hdmf-experimental.experimental version: 0.1.0 imports: -- ../../hdmf_common/v1_5_0/namespace +- ../../hdmf_common/v1_4_0/namespace - hdmf-experimental.nwb.language default_prefix: hdmf-experimental.experimental/ classes: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml index 5bb0e2b..a884e44 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-experimental description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml index 17a7d9d..89ffc2c 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml +++ 
b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml @@ -9,7 +9,7 @@ annotations: id: hdmf-experimental.resources version: 0.1.0 imports: -- ../../hdmf_common/v1_5_0/namespace +- ../../hdmf_common/v1_4_0/namespace - hdmf-experimental.nwb.language default_prefix: hdmf-experimental.resources/ classes: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml new file mode 100644 index 0000000..daf947b --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml @@ -0,0 +1,31 @@ +name: hdmf-experimental.experimental +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-experimental +id: hdmf-experimental.experimental +version: 0.2.0 +imports: +- ../../hdmf_common/v1_5_1/namespace +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental.experimental/ +classes: + EnumData: + name: EnumData + description: Data that come from a fixed set of values. A data value of i corresponds + to the i-th value in the VectorData referenced by the 'elements' attribute. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + elements: + name: elements + description: Reference to the VectorData object that contains the enumerable + elements + range: VectorData + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml new file mode 100644 index 0000000..a884e44 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml @@ -0,0 +1,109 @@ +name: hdmf-experimental.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-experimental +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + repr: np.float32 + float64: + name: float64 + typeof: double + repr: np.float64 + long: + name: long + typeof: integer + repr: np.longlong + int64: + name: int64 + typeof: integer + repr: np.int64 + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + repr: np.int32 + int16: + name: int16 + typeof: integer + repr: np.int16 + short: + name: short + typeof: integer + repr: np.int16 + int8: + name: int8 + typeof: integer + repr: np.int8 + uint: + name: uint + typeof: integer + repr: np.uint64 + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + repr: np.uint32 + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + repr: np.uint16 + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + repr: np.uint8 + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + repr: np.uint64 + minimum_value: 0 + numeric: + name: numeric + typeof: float + repr: np.number + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: 
boolean + isodatetime: + name: isodatetime + typeof: datetime + repr: np.datetime64 +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml new file mode 100644 index 0000000..c2fc8d8 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml @@ -0,0 +1,196 @@ +name: hdmf-experimental.resources +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-experimental +id: hdmf-experimental.resources +version: 0.2.0 +imports: +- ../../hdmf_common/v1_5_1/namespace +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental.resources/ +classes: + ExternalResources: + name: ExternalResources + description: 'A set of four tables for tracking external resource references in + a file. NOTE: this data type is in beta testing and is subject to change in + a later version.' + is_a: Container + attributes: + name: + name: name + range: string + required: true + keys: + name: keys + description: A table for storing user terms that are used to refer to external + resources. + range: ExternalResources__keys + required: true + multivalued: false + entities: + name: entities + description: A table for mapping user terms (i.e., keys) to resource entities. + range: ExternalResources__entities + required: true + multivalued: false + resources: + name: resources + description: A table for mapping user terms (i.e., keys) to resource entities. + range: ExternalResources__resources + required: true + multivalued: false + objects: + name: objects + description: A table for identifying which objects in a file contain references + to external resources. + range: ExternalResources__objects + required: true + multivalued: false + object_keys: + name: object_keys + description: A table for identifying which objects use which keys. + range: ExternalResources__object_keys + required: true + multivalued: false + tree_root: true + ExternalResources__keys: + name: ExternalResources__keys + description: A table for storing user terms that are used to refer to external + resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(keys) + range: string + required: true + equals_string: keys + key: + name: key + description: The user term that maps to one or more resources in the 'resources' + table. + range: text + required: true + multivalued: false + ExternalResources__entities: + name: ExternalResources__entities + description: A table for mapping user terms (i.e., keys) to resource entities. + is_a: Data + attributes: + name: + name: name + ifabsent: string(entities) + range: string + required: true + equals_string: entities + keys_idx: + name: keys_idx + description: The index to the key in the 'keys' table. + range: uint + required: true + multivalued: false + resources_idx: + name: resources_idx + description: The index into the 'resources' table + range: uint + required: true + multivalued: false + entity_id: + name: entity_id + description: The unique identifier of the entity. + range: text + required: true + multivalued: false + entity_uri: + name: entity_uri + description: The URI for the entity this reference applies to. This can be + an empty string.
+ range: text + required: true + multivalued: false + ExternalResources__resources: + name: ExternalResources__resources + description: A table for mapping user terms (i.e., keys) to resource entities. + is_a: Data + attributes: + name: + name: name + ifabsent: string(resources) + range: string + required: true + equals_string: resources + resource: + name: resource + description: The name of the resource. + range: text + required: true + multivalued: false + resource_uri: + name: resource_uri + description: The URI for the resource. This can be an empty string. + range: text + required: true + multivalued: false + ExternalResources__objects: + name: ExternalResources__objects + description: A table for identifying which objects in a file contain references + to external resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(objects) + range: string + required: true + equals_string: objects + object_id: + name: object_id + description: The UUID for the object. + range: text + required: true + multivalued: false + relative_path: + name: relative_path + description: The relative path from the container with the object_id to the + dataset or attribute with the value(s) that is associated with an external + resource. This can be an empty string if the container is a dataset which + contains the value(s) that is associated with an external resource. + range: text + required: true + multivalued: false + field: + name: field + description: The field of the compound data type using an external resource. + This is used only if the dataset or attribute is a compound data type; otherwise + this should be an empty string. + range: text + required: true + multivalued: false + ExternalResources__object_keys: + name: ExternalResources__object_keys + description: A table for identifying which objects use which keys. + is_a: Data + attributes: + name: + name: name + ifabsent: string(object_keys) + range: string + required: true + equals_string: object_keys + objects_idx: + name: objects_idx + description: The index to the 'objects' table for the object that holds the + key. + range: uint + required: true + multivalued: false + keys_idx: + name: keys_idx + description: The index to the 'keys' table for the key. + range: uint + required: true + multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml new file mode 100644 index 0000000..6a311e0 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-experimental +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-experimental +description: Experimental data structures provided by HDMF. These are not guaranteed + to be available in the future. 
+id: hdmf-experimental +version: 0.2.0 +imports: +- hdmf-experimental.experimental +- hdmf-experimental.resources +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml new file mode 100644 index 0000000..fb98004 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml @@ -0,0 +1,31 @@ +name: hdmf-experimental.experimental +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-experimental +id: hdmf-experimental.experimental +version: 0.3.0 +imports: +- ../../hdmf_common/v1_6_0/namespace +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental.experimental/ +classes: + EnumData: + name: EnumData + description: Data that come from a fixed set of values. A data value of i corresponds + to the i-th value in the VectorData referenced by the 'elements' attribute. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + elements: + name: elements + description: Reference to the VectorData object that contains the enumerable + elements + range: VectorData + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml new file mode 100644 index 0000000..a884e44 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml @@ -0,0 +1,109 @@ +name: hdmf-experimental.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-experimental +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + repr: np.float32 + float64: + name: float64 + typeof: double + repr: np.float64 + long: + name: long + typeof: integer + repr: np.longlong + int64: + name: int64 + typeof: integer + repr: np.int64 + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + repr: np.int32 + int16: + name: int16 + typeof: integer + repr: np.int16 + short: + name: short + typeof: integer + repr: np.int16 + int8: + name: int8 + typeof: integer + repr: np.int8 + uint: + name: uint + typeof: integer + repr: np.uint64 + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + repr: np.uint32 + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + repr: np.uint16 + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + repr: np.uint8 + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + repr: np.uint64 + minimum_value: 0 + numeric: + name: numeric + typeof: float + repr: np.number + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime + repr: np.datetime64 +classes: + AnyType: + name: AnyType + description: Needed because some classes in 
hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml new file mode 100644 index 0000000..350ef24 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml @@ -0,0 +1,199 @@ +name: hdmf-experimental.resources +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-experimental +id: hdmf-experimental.resources +version: 0.3.0 +imports: +- ../../hdmf_common/v1_6_0/namespace +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental.resources/ +classes: + ExternalResources: + name: ExternalResources + description: 'A set of five tables for tracking external resource references in + a file. NOTE: this data type is experimental and is subject to change in a later + version.' + is_a: Container + attributes: + name: + name: name + range: string + required: true + keys: + name: keys + description: A table for storing user terms that are used to refer to external + resources. + range: ExternalResources__keys + required: true + multivalued: false + files: + name: files + description: A table for storing object ids of files used in external resources. + range: ExternalResources__files + required: true + multivalued: false + entities: + name: entities + description: A table for mapping user terms (i.e., keys) to resource entities. + range: ExternalResources__entities + required: true + multivalued: false + objects: + name: objects + description: A table for identifying which objects in a file contain references + to external resources. + range: ExternalResources__objects + required: true + multivalued: false + object_keys: + name: object_keys + description: A table for identifying which objects use which keys. + range: ExternalResources__object_keys + required: true + multivalued: false + tree_root: true + ExternalResources__keys: + name: ExternalResources__keys + description: A table for storing user terms that are used to refer to external + resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(keys) + range: string + required: true + equals_string: keys + key: + name: key + description: The user term that maps to one or more resources in the `resources` + table, e.g., "human". + range: text + required: true + multivalued: false + ExternalResources__files: + name: ExternalResources__files + description: A table for storing object ids of files used in external resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(files) + range: string + required: true + equals_string: files + file_object_id: + name: file_object_id + description: The object id (UUID) of a file that contains objects that refers + to external resources. + range: text + required: true + multivalued: false + ExternalResources__entities: + name: ExternalResources__entities + description: A table for mapping user terms (i.e., keys) to resource entities. + is_a: Data + attributes: + name: + name: name + ifabsent: string(entities) + range: string + required: true + equals_string: entities + keys_idx: + name: keys_idx + description: The row index to the key in the `keys` table. 
+ range: uint + required: true + multivalued: false + entity_id: + name: entity_id + description: The compact uniform resource identifier (CURIE) of the entity, + in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'. + range: text + required: true + multivalued: false + entity_uri: + name: entity_uri + description: The URI for the entity this reference applies to. This can be + an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606 + range: text + required: true + multivalued: false + ExternalResources__objects: + name: ExternalResources__objects + description: A table for identifying which objects in a file contain references + to external resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(objects) + range: string + required: true + equals_string: objects + files_idx: + name: files_idx + description: The row index to the file in the `files` table containing the + object. + range: uint + required: true + multivalued: false + object_id: + name: object_id + description: The object id (UUID) of the object. + range: text + required: true + multivalued: false + object_type: + name: object_type + description: The data type of the object. + range: text + required: true + multivalued: false + relative_path: + name: relative_path + description: The relative path from the data object with the `object_id` to + the dataset or attribute with the value(s) that is associated with an external + resource. This can be an empty string if the object is a dataset that contains + the value(s) that is associated with an external resource. + range: text + required: true + multivalued: false + field: + name: field + description: The field within the compound data type using an external resource. + This is used only if the dataset or attribute is a compound data type; otherwise + this should be an empty string. + range: text + required: true + multivalued: false + ExternalResources__object_keys: + name: ExternalResources__object_keys + description: A table for identifying which objects use which keys. + is_a: Data + attributes: + name: + name: name + ifabsent: string(object_keys) + range: string + required: true + equals_string: object_keys + objects_idx: + name: objects_idx + description: The row index to the object in the `objects` table that holds + the key + range: uint + required: true + multivalued: false + keys_idx: + name: keys_idx + description: The row index to the key in the `keys` table. + range: uint + required: true + multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml new file mode 100644 index 0000000..fe62e64 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-experimental +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-experimental +description: Experimental data structures provided by HDMF. These are not guaranteed + to be available in the future. 
+id: hdmf-experimental +version: 0.3.0 +imports: +- hdmf-experimental.experimental +- hdmf-experimental.resources +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml new file mode 100644 index 0000000..31c2867 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml @@ -0,0 +1,31 @@ +name: hdmf-experimental.experimental +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-experimental +id: hdmf-experimental.experimental +version: 0.4.0 +imports: +- ../../hdmf_common/v1_7_0/namespace +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental.experimental/ +classes: + EnumData: + name: EnumData + description: Data that come from a fixed set of values. A data value of i corresponds + to the i-th value in the VectorData referenced by the 'elements' attribute. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + elements: + name: elements + description: Reference to the VectorData object that contains the enumerable + elements + range: VectorData + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml new file mode 100644 index 0000000..a884e44 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml @@ -0,0 +1,109 @@ +name: hdmf-experimental.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-experimental +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + repr: np.float32 + float64: + name: float64 + typeof: double + repr: np.float64 + long: + name: long + typeof: integer + repr: np.longlong + int64: + name: int64 + typeof: integer + repr: np.int64 + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + repr: np.int32 + int16: + name: int16 + typeof: integer + repr: np.int16 + short: + name: short + typeof: integer + repr: np.int16 + int8: + name: int8 + typeof: integer + repr: np.int8 + uint: + name: uint + typeof: integer + repr: np.uint64 + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + repr: np.uint32 + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + repr: np.uint16 + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + repr: np.uint8 + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + repr: np.uint64 + minimum_value: 0 + numeric: + name: numeric + typeof: float + repr: np.number + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime + repr: np.datetime64 +classes: + AnyType: + name: AnyType + description: Needed because some classes in 
hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml new file mode 100644 index 0000000..8768e73 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml @@ -0,0 +1,222 @@ +name: hdmf-experimental.resources +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-experimental +id: hdmf-experimental.resources +version: 0.4.0 +imports: +- ../../hdmf_common/v1_7_0/namespace +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental.resources/ +classes: + ExternalResources: + name: ExternalResources + description: 'A set of five tables for tracking external resource references in + a file. NOTE: this data type is experimental and is subject to change in a later + version.' + is_a: Container + attributes: + name: + name: name + range: string + required: true + keys: + name: keys + description: A table for storing user terms that are used to refer to external + resources. + range: ExternalResources__keys + required: true + multivalued: false + files: + name: files + description: A table for storing object ids of files used in external resources. + range: ExternalResources__files + required: true + multivalued: false + entities: + name: entities + description: A table for mapping user terms (i.e., keys) to resource entities. + range: ExternalResources__entities + required: true + multivalued: false + objects: + name: objects + description: A table for identifying which objects in a file contain references + to external resources. + range: ExternalResources__objects + required: true + multivalued: false + object_keys: + name: object_keys + description: A table for identifying which objects use which keys. + range: ExternalResources__object_keys + required: true + multivalued: false + entity_keys: + name: entity_keys + description: A table for identifying which keys use which entity. + range: ExternalResources__entity_keys + required: true + multivalued: false + tree_root: true + ExternalResources__keys: + name: ExternalResources__keys + description: A table for storing user terms that are used to refer to external + resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(keys) + range: string + required: true + equals_string: keys + key: + name: key + description: The user term that maps to one or more resources in the `resources` + table, e.g., "human". + range: text + required: true + multivalued: false + ExternalResources__files: + name: ExternalResources__files + description: A table for storing object ids of files used in external resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(files) + range: string + required: true + equals_string: files + file_object_id: + name: file_object_id + description: The object id (UUID) of a file that contains objects that refers + to external resources. + range: text + required: true + multivalued: false + ExternalResources__entities: + name: ExternalResources__entities + description: A table for mapping user terms (i.e., keys) to resource entities. 
+ is_a: Data + attributes: + name: + name: name + ifabsent: string(entities) + range: string + required: true + equals_string: entities + entity_id: + name: entity_id + description: The compact uniform resource identifier (CURIE) of the entity, + in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'. + range: text + required: true + multivalued: false + entity_uri: + name: entity_uri + description: The URI for the entity this reference applies to. This can be + an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606 + range: text + required: true + multivalued: false + ExternalResources__objects: + name: ExternalResources__objects + description: A table for identifying which objects in a file contain references + to external resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(objects) + range: string + required: true + equals_string: objects + files_idx: + name: files_idx + description: The row index to the file in the `files` table containing the + object. + range: uint + required: true + multivalued: false + object_id: + name: object_id + description: The object id (UUID) of the object. + range: text + required: true + multivalued: false + object_type: + name: object_type + description: The data type of the object. + range: text + required: true + multivalued: false + relative_path: + name: relative_path + description: The relative path from the data object with the `object_id` to + the dataset or attribute with the value(s) that is associated with an external + resource. This can be an empty string if the object is a dataset that contains + the value(s) that is associated with an external resource. + range: text + required: true + multivalued: false + field: + name: field + description: The field within the compound data type using an external resource. + This is used only if the dataset or attribute is a compound data type; otherwise + this should be an empty string. + range: text + required: true + multivalued: false + ExternalResources__object_keys: + name: ExternalResources__object_keys + description: A table for identifying which objects use which keys. + is_a: Data + attributes: + name: + name: name + ifabsent: string(object_keys) + range: string + required: true + equals_string: object_keys + objects_idx: + name: objects_idx + description: The row index to the object in the `objects` table that holds + the key + range: uint + required: true + multivalued: false + keys_idx: + name: keys_idx + description: The row index to the key in the `keys` table. + range: uint + required: true + multivalued: false + ExternalResources__entity_keys: + name: ExternalResources__entity_keys + description: A table for identifying which keys use which entity. + is_a: Data + attributes: + name: + name: name + ifabsent: string(entity_keys) + range: string + required: true + equals_string: entity_keys + entities_idx: + name: entities_idx + description: The row index to the entity in the `entities` table. + range: uint + required: true + multivalued: false + keys_idx: + name: keys_idx + description: The row index to the key in the `keys` table. 
+ range: uint + required: true + multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml new file mode 100644 index 0000000..a48814e --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-experimental +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-experimental +description: Experimental data structures provided by HDMF. These are not guaranteed + to be available in the future. +id: hdmf-experimental +version: 0.4.0 +imports: +- hdmf-experimental.experimental +- hdmf-experimental.resources +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml index 5bb0e2b..a884e44 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-experimental description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: diff --git a/nwb_linkml/tests/fixtures.py b/nwb_linkml/tests/fixtures.py index 092ba60..ee4236b 100644 --- a/nwb_linkml/tests/fixtures.py +++ b/nwb_linkml/tests/fixtures.py @@ -90,11 +90,12 @@ def nwb_core_fixture(request) -> NamespacesAdapter: return nwb_core + @pytest.fixture(scope="session") def nwb_core_linkml(nwb_core_fixture, tmp_output_dir) -> LinkMLSchemaBuild: provider = LinkMLProvider(tmp_output_dir, allow_repo=False, verbose=False) result = provider.build(ns_adapter=nwb_core_fixture, force=True) - return result['core'] + return result["core"] @pytest.fixture(scope="session") @@ -104,13 +105,10 @@ def nwb_core_module(nwb_core_linkml: LinkMLSchemaBuild, tmp_output_dir) -> Modul """ provider = PydanticProvider(tmp_output_dir, verbose=False) result = provider.build(nwb_core_linkml.namespace, force=True) - mod = provider.get('core', version=nwb_core_linkml.version, allow_repo=False) + mod = provider.get("core", version=nwb_core_linkml.version, allow_repo=False) return mod - - - @pytest.fixture(scope="session") def data_dir() -> Path: path = Path(__file__).parent.resolve() / "data" diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index 572a651..b4da94b 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -32,6 +32,5 @@ def electrical_series() -> Tuple["ElectricalSeries", "NWBFileGeneralExtracellula id=np.arange(0, n_electrodes), x=np.arange(0, n_electrodes), y=np.arange(n_electrodes, n_electrodes * 2), - group=[electrode_group]*n_electrodes, - + group=[electrode_group] * n_electrodes, ) From 38e8a6f7a0665f79db160e26f9d1e9f8730959fe Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 31 Jul 2024 01:27:45 -0700 Subject: [PATCH 03/61] lint --- .../src/nwb_linkml/generators/pydantic.py | 76 ++++++++----------- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 33 ++++---- nwb_linkml/src/nwb_linkml/providers/linkml.py | 4 +- nwb_linkml/tests/fixtures.py | 2 +- 
nwb_linkml/tests/test_includes/test_hdmf.py | 3 +- 5 files changed, 48 insertions(+), 70 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py index 0cc613d..59f7d4a 100644 --- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py +++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py @@ -1,75 +1,43 @@ """ Subclass of :class:`linkml.generators.PydanticGenerator` +customized to support NWB models. -The pydantic generator is a subclass of -- :class:`linkml.utils.generator.Generator` -- :class:`linkml.generators.oocodegen.OOCodeGenerator` - -The default `__main__` method -- Instantiates the class -- Calls :meth:`~linkml.generators.PydanticGenerator.serialize` - -The `serialize` method: - -- Accepts an optional jinja-style template, otherwise it uses the default template -- Uses :class:`linkml_runtime.utils.schemaview.SchemaView` to interact with the schema -- Generates linkML Classes - - `generate_enums` runs first - -.. note:: - - This module is heinous. We have mostly copied and pasted the existing :class:`linkml.generators.PydanticGenerator` - and overridden what we need to make this work for NWB, but the source is... - a little messy. We will be tidying this up and trying to pull changes upstream, - but for now this is just our hacky little secret. - +See class and module docstrings for details :) """ -# FIXME: Remove this after we refactor this generator -# ruff: noqa - -import inspect -import pdb import re import sys -import warnings -from copy import copy from dataclasses import dataclass, field from pathlib import Path from types import ModuleType -from typing import ClassVar, Dict, List, Optional, Tuple, Type, Union +from typing import ClassVar, Dict, List, Optional, Tuple from linkml.generators import PydanticGenerator -from linkml.generators.pydanticgen.build import SlotResult, ClassResult from linkml.generators.pydanticgen.array import ArrayRepresentation, NumpydanticArray -from linkml.generators.pydanticgen.template import PydanticModule, Import, Imports +from linkml.generators.pydanticgen.build import ClassResult, SlotResult +from linkml.generators.pydanticgen.template import Import, Imports, PydanticModule from linkml_runtime.linkml_model.meta import ( - Annotation, - AnonymousSlotExpression, ArrayExpression, - ClassDefinition, - ClassDefinitionName, - ElementName, SchemaDefinition, SlotDefinition, SlotDefinitionName, ) from linkml_runtime.utils.compile_python import file_text -from linkml_runtime.utils.formatutils import camelcase, underscore, remove_empty_items +from linkml_runtime.utils.formatutils import remove_empty_items from linkml_runtime.utils.schemaview import SchemaView -from pydantic import BaseModel - -from nwb_linkml.maps import flat_to_nptyping -from nwb_linkml.maps.naming import module_case, version_module_case -from nwb_linkml.includes.types import ModelTypeString, _get_name, NamedString, NamedImports from nwb_linkml.includes.hdmf import DYNAMIC_TABLE_IMPORTS, DYNAMIC_TABLE_INJECTS +from nwb_linkml.includes.types import ModelTypeString, NamedImports, NamedString, _get_name OPTIONAL_PATTERN = re.compile(r"Optional\[([\w\.]*)\]") @dataclass class NWBPydanticGenerator(PydanticGenerator): + """ + Subclass of pydantic generator, custom behavior is in overridden lifecycle methods :) + """ + injected_fields: List[str] = ( ( @@ -96,7 +64,7 @@ class NWBPydanticGenerator(PydanticGenerator): def _check_anyof( self, s: SlotDefinition, sn: SlotDefinitionName, sv: SchemaView - ): # pragma: no cover + ) -> 
None: # pragma: no cover """ Overridden to allow `array` in any_of """ @@ -108,7 +76,7 @@ class NWBPydanticGenerator(PydanticGenerator): allowed = True for option in s.any_of: items = remove_empty_items(option) - if not all([key in allowed_keys for key in items.keys()]): + if not all([key in allowed_keys for key in items]): allowed = False if allowed: return @@ -132,10 +100,14 @@ class NWBPydanticGenerator(PydanticGenerator): return slot def after_generate_class(self, cls: ClassResult, sv: SchemaView) -> ClassResult: + """Customize dynamictable behavior""" cls = AfterGenerateClass.inject_dynamictable(cls) return cls def before_render_template(self, template: PydanticModule, sv: SchemaView) -> PydanticModule: + """ + Remove source file from metadata + """ if "source_file" in template.meta: del template.meta["source_file"] return template @@ -167,6 +139,9 @@ class AfterGenerateSlot: @staticmethod def skip_meta(slot: SlotResult, skip_meta: tuple[str]) -> SlotResult: + """ + Skip additional metadata slots + """ for key in skip_meta: if key in slot.attribute.meta: del slot.attribute.meta[key] @@ -242,6 +217,14 @@ class AfterGenerateClass: @staticmethod def inject_dynamictable(cls: ClassResult) -> ClassResult: + """ + Modify dynamictable class bases and inject needed objects :) + Args: + cls: + + Returns: + + """ if cls.cls.name == "DynamicTable": cls.cls.bases = ["DynamicTableMixin"] @@ -269,7 +252,8 @@ def compile_python( """ Compile the text or file and return the resulting module @param text_or_fn: Python text or file name that references python file - @param package_path: Root package path. If omitted and we've got a python file, the package is the containing + @param package_path: Root package path. If omitted and we've got a python file, + the package is the containing directory @return: Compiled module """ diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index c86499b..9027ae6 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -2,10 +2,9 @@ Special types for mimicking HDMF special case behavior """ -from typing import Any, ClassVar, Dict, List, Optional, Union, Tuple, overload, TYPE_CHECKING +from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Tuple, Union, overload - -from linkml.generators.pydanticgen.template import Imports, Import, ObjectImport +from linkml.generators.pydanticgen.template import Import, Imports, ObjectImport from numpydantic import NDArray from pandas import DataFrame from pydantic import BaseModel, ConfigDict, Field, model_validator @@ -133,7 +132,7 @@ class DynamicTableMixin(BaseModel): @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]): + def create_colnames(cls, model: Dict[str, Any]) -> None: """ Construct colnames from arguments. 
@@ -142,19 +141,17 @@ class DynamicTableMixin(BaseModel): """ if "colnames" not in model: colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") ] model["colnames"] = colnames else: # add any columns not explicitly given an order at the end colnames = [ k - for k in model.keys() + for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"].keys() + and k not in model["colnames"] ] model["colnames"].extend(colnames) return model @@ -171,13 +168,11 @@ class DynamicTableMixin(BaseModel): for field_name in self.model_fields_set: # implicit name-based index field = getattr(self, field_name) - if isinstance(field, VectorIndex): - if field_name == f"{key}_index": - idx = field - break - elif field.target is col: - idx = field - break + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break if idx is not None: col._index = idx idx.target = col @@ -201,7 +196,7 @@ class VectorDataMixin(BaseModel): else: return self.array[item] - def __setitem__(self, key, value) -> None: + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing self._index[key] = value @@ -218,7 +213,7 @@ class VectorIndexMixin(BaseModel): array: Optional[NDArray] = None target: Optional["VectorData"] = None - def _getitem_helper(self, arg: int): + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ @@ -239,7 +234,7 @@ class VectorIndexMixin(BaseModel): else: raise NotImplementedError("DynamicTableRange not supported yet") - def __setitem__(self, key, value) -> None: + def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: # VectorIndex is the thing that knows how to do the slicing self._index[key] = value diff --git a/nwb_linkml/src/nwb_linkml/providers/linkml.py b/nwb_linkml/src/nwb_linkml/providers/linkml.py index f868de7..4fc6233 100644 --- a/nwb_linkml/src/nwb_linkml/providers/linkml.py +++ b/nwb_linkml/src/nwb_linkml/providers/linkml.py @@ -3,9 +3,9 @@ Provider for LinkML schema built from NWB schema """ import shutil -from pathlib import Path -from typing import Dict, Optional, TypedDict from dataclasses import dataclass +from pathlib import Path +from typing import Dict, Optional from linkml_runtime import SchemaView from linkml_runtime.dumpers import yaml_dumper diff --git a/nwb_linkml/tests/fixtures.py b/nwb_linkml/tests/fixtures.py index ee4236b..3ab2d3c 100644 --- a/nwb_linkml/tests/fixtures.py +++ b/nwb_linkml/tests/fixtures.py @@ -15,9 +15,9 @@ from linkml_runtime.linkml_model import ( ) from nwb_linkml.adapters.namespaces import NamespacesAdapter +from nwb_linkml.io import schema as io from nwb_linkml.providers import LinkMLProvider, PydanticProvider from nwb_linkml.providers.linkml import LinkMLSchemaBuild -from nwb_linkml.io import schema as io from nwb_schema_language import Attribute, Dataset, Group __all__ = [ diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index b4da94b..26f5109 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -1,5 +1,4 @@ -from typing import Tuple, TYPE_CHECKING -from types import ModuleType +from typing import Tuple 
import numpy as np import pytest From 776078ae6730e898eb5754ec1234b730369c1811 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 31 Jul 2024 01:30:06 -0700 Subject: [PATCH 04/61] add pandas --- nwb_linkml/pdm.lock | 68 ++++++++++++++++++++++++++++++++++++--- nwb_linkml/pyproject.toml | 1 + 2 files changed, 65 insertions(+), 4 deletions(-) diff --git a/nwb_linkml/pdm.lock b/nwb_linkml/pdm.lock index f11882c..7306eec 100644 --- a/nwb_linkml/pdm.lock +++ b/nwb_linkml/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "dev", "tests"] strategy = ["cross_platform", "inherit_metadata"] lock_version = "4.4.2" -content_hash = "sha256:5f3b06ea8b752b60c44c392e6079a1e0af96ad60d4442faeef88bf055ee90529" +content_hash = "sha256:a4d05bde012bc589ce24125d77ba68afedea5cf98ff044b6a0fb27f44d3982f1" [[package]] name = "annotated-types" @@ -839,20 +839,20 @@ files = [ [[package]] name = "linkml" version = "0.0.0" -requires_python = "<4.0.0,>=3.8.1" +requires_python = ">=3.8.1,<4.0.0" git = "https://github.com/sneakers-the-rat/linkml" ref = "nwb-linkml" revision = "df8685eb9e99eaf9ec694db2e9cd59bab8892438" summary = "Linked Open Data Modeling Language" groups = ["default", "dev", "tests"] dependencies = [ - "antlr4-python3-runtime<4.10,==4.*,>=4.9.0", + "antlr4-python3-runtime<4.10,>=4.9.0", "click>=7.0", "graphviz>=0.10.1", "hbreader", "isodate>=0.6.0", "jinja2>=3.1.0", - "jsonasobj2==1.*,>=1.0.0,>=1.0.3", + "jsonasobj2<2.dev0,>=1.0.3", "jsonschema[format]>=4.0.0", "linkml-dataops", "linkml-runtime==1.8.0", @@ -1132,6 +1132,45 @@ files = [ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] +[[package]] +name = "pandas" +version = "2.2.2" +requires_python = ">=3.9" +summary = "Powerful data structures for data analysis, time series, and statistics" +groups = ["default", "dev", "tests"] +dependencies = [ + "numpy>=1.22.4; python_version < \"3.11\"", + "numpy>=1.23.2; python_version == \"3.11\"", + "numpy>=1.26.0; python_version >= \"3.12\"", + "python-dateutil>=2.8.2", + "pytz>=2020.1", + "tzdata>=2022.7", +] +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + [[package]] name = "parse" version = "1.20.2" @@ -1549,6 +1588,16 @@ files = [ {file = "PyTrie-0.4.0.tar.gz", hash = "sha256:8f4488f402d3465993fb6b6efa09866849ed8cda7903b50647b7d0342b805379"}, ] +[[package]] +name = "pytz" +version = "2024.1" +summary = "World timezone definitions, modern and historical" +groups = ["default", "dev", "tests"] +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -2072,6 +2121,17 @@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "tzdata" +version = "2024.1" +requires_python = ">=2" +summary = "Provider of IANA time zone data" +groups = ["default", "dev", "tests"] +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + [[package]] name = "uri-template" version = "1.3.0" diff --git a/nwb_linkml/pyproject.toml b/nwb_linkml/pyproject.toml index d24847a..f1b7268 100644 --- a/nwb_linkml/pyproject.toml +++ b/nwb_linkml/pyproject.toml @@ -24,6 +24,7 @@ dependencies = [ 'typing-extensions>=4.12.2;python_version<"3.11"', "numpydantic>=1.2.1", "black>=24.4.2", + "pandas>=2.2.2", ] [project.urls] From 1d527d8f715daff0e4d3c266d14eb7bc6337d31d Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: 
Wed, 31 Jul 2024 01:54:14 -0700 Subject: [PATCH 05/61] fix hdmf forwardrefs, remove numpy types temporarily --- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 10 +++++----- nwb_linkml/src/nwb_linkml/lang_elements.py | 9 +++++---- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 9027ae6..10aa05f 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -22,7 +22,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] + __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -33,15 +33,15 @@ class DynamicTableMixin(BaseModel): colnames: List[str] = Field(default_factory=list) @property - def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: + def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: return [getattr(self, k) for i, k in enumerate(self.colnames)] @overload - def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... + def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload def __getitem__(self, item: int) -> DataFrame: ... @@ -54,7 +54,7 @@ class DynamicTableMixin(BaseModel): DataFrame, list, "NDArray", - "VectorData", + "VectorDataMixin", ]: ... @overload diff --git a/nwb_linkml/src/nwb_linkml/lang_elements.py b/nwb_linkml/src/nwb_linkml/lang_elements.py index 7bb68c4..4ebd6d4 100644 --- a/nwb_linkml/src/nwb_linkml/lang_elements.py +++ b/nwb_linkml/src/nwb_linkml/lang_elements.py @@ -12,7 +12,7 @@ from linkml_runtime.linkml_model import ( TypeDefinition, ) -from nwb_linkml.maps import flat_to_linkml, flat_to_np +from nwb_linkml.maps import flat_to_linkml def _make_dtypes() -> List[TypeDefinition]: @@ -27,12 +27,13 @@ def _make_dtypes() -> List[TypeDefinition]: if nwbtype.startswith("uint"): amin = 0 - np_type = flat_to_np[nwbtype] + # FIXME: Restore numpy types when we wrap them :) + # np_type = flat_to_np[nwbtype] - repr_string = f"np.{np_type.__name__}" if np_type.__module__ == "numpy" else None + # repr_string = f"np.{np_type.__name__}" if np_type.__module__ == "numpy" else None atype = TypeDefinition( - name=nwbtype, minimum_value=amin, typeof=linkmltype, repr=repr_string + name=nwbtype, minimum_value=amin, typeof=linkmltype, # repr=repr_string ) DTypeTypes.append(atype) return DTypeTypes From 0d10ba9d754bd1d91a84a9ca09b1ec0a548d8f42 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 31 Jul 2024 01:56:48 -0700 Subject: [PATCH 06/61] regenerate models, lint --- .../src/nwb_linkml/generators/pydantic.py | 1 - nwb_linkml/src/nwb_linkml/lang_elements.py | 4 +- .../pydantic/core/v2_2_0/core_nwb_base.py | 20 +- .../pydantic/core/v2_2_0/core_nwb_behavior.py | 8 +- .../pydantic/core/v2_2_0/core_nwb_ecephys.py | 46 +- .../pydantic/core/v2_2_0/core_nwb_epoch.py | 8 +- .../pydantic/core/v2_2_0/core_nwb_file.py | 24 +- .../pydantic/core/v2_2_0/core_nwb_icephys.py | 80 ++-- .../pydantic/core/v2_2_0/core_nwb_image.py | 67 ++- .../pydantic/core/v2_2_0/core_nwb_misc.py | 42 +- .../pydantic/core/v2_2_0/core_nwb_ogen.py | 8 +- 
.../pydantic/core/v2_2_0/core_nwb_ophys.py | 44 +- .../core/v2_2_0/core_nwb_retinotopy.py | 52 +- .../pydantic/core/v2_2_1/core_nwb_base.py | 20 +- .../pydantic/core/v2_2_1/core_nwb_behavior.py | 8 +- .../pydantic/core/v2_2_1/core_nwb_ecephys.py | 46 +- .../pydantic/core/v2_2_1/core_nwb_epoch.py | 8 +- .../pydantic/core/v2_2_1/core_nwb_file.py | 24 +- .../pydantic/core/v2_2_1/core_nwb_icephys.py | 80 ++-- .../pydantic/core/v2_2_1/core_nwb_image.py | 67 ++- .../pydantic/core/v2_2_1/core_nwb_misc.py | 42 +- .../pydantic/core/v2_2_1/core_nwb_ogen.py | 8 +- .../pydantic/core/v2_2_1/core_nwb_ophys.py | 44 +- .../core/v2_2_1/core_nwb_retinotopy.py | 52 +- .../pydantic/core/v2_2_2/core_nwb_base.py | 20 +- .../pydantic/core/v2_2_2/core_nwb_behavior.py | 8 +- .../pydantic/core/v2_2_2/core_nwb_ecephys.py | 46 +- .../pydantic/core/v2_2_2/core_nwb_epoch.py | 8 +- .../pydantic/core/v2_2_2/core_nwb_file.py | 24 +- .../pydantic/core/v2_2_2/core_nwb_icephys.py | 80 ++-- .../pydantic/core/v2_2_2/core_nwb_image.py | 67 ++- .../pydantic/core/v2_2_2/core_nwb_misc.py | 42 +- .../pydantic/core/v2_2_2/core_nwb_ogen.py | 8 +- .../pydantic/core/v2_2_2/core_nwb_ophys.py | 26 +- .../core/v2_2_2/core_nwb_retinotopy.py | 64 +-- .../pydantic/core/v2_2_4/core_nwb_base.py | 20 +- .../pydantic/core/v2_2_4/core_nwb_behavior.py | 8 +- .../pydantic/core/v2_2_4/core_nwb_ecephys.py | 46 +- .../pydantic/core/v2_2_4/core_nwb_epoch.py | 8 +- .../pydantic/core/v2_2_4/core_nwb_file.py | 24 +- .../pydantic/core/v2_2_4/core_nwb_icephys.py | 80 ++-- .../pydantic/core/v2_2_4/core_nwb_image.py | 67 ++- .../pydantic/core/v2_2_4/core_nwb_misc.py | 42 +- .../pydantic/core/v2_2_4/core_nwb_ogen.py | 8 +- .../pydantic/core/v2_2_4/core_nwb_ophys.py | 44 +- .../core/v2_2_4/core_nwb_retinotopy.py | 64 +-- .../pydantic/core/v2_2_5/core_nwb_base.py | 20 +- .../pydantic/core/v2_2_5/core_nwb_behavior.py | 8 +- .../pydantic/core/v2_2_5/core_nwb_ecephys.py | 46 +- .../pydantic/core/v2_2_5/core_nwb_epoch.py | 8 +- .../pydantic/core/v2_2_5/core_nwb_file.py | 24 +- .../pydantic/core/v2_2_5/core_nwb_icephys.py | 80 ++-- .../pydantic/core/v2_2_5/core_nwb_image.py | 67 ++- .../pydantic/core/v2_2_5/core_nwb_misc.py | 42 +- .../pydantic/core/v2_2_5/core_nwb_ogen.py | 8 +- .../pydantic/core/v2_2_5/core_nwb_ophys.py | 42 +- .../core/v2_2_5/core_nwb_retinotopy.py | 64 +-- .../pydantic/core/v2_3_0/core_nwb_base.py | 20 +- .../pydantic/core/v2_3_0/core_nwb_behavior.py | 8 +- .../pydantic/core/v2_3_0/core_nwb_ecephys.py | 46 +- .../pydantic/core/v2_3_0/core_nwb_epoch.py | 8 +- .../pydantic/core/v2_3_0/core_nwb_file.py | 24 +- .../pydantic/core/v2_3_0/core_nwb_icephys.py | 80 ++-- .../pydantic/core/v2_3_0/core_nwb_image.py | 67 ++- .../pydantic/core/v2_3_0/core_nwb_misc.py | 44 +- .../pydantic/core/v2_3_0/core_nwb_ogen.py | 8 +- .../pydantic/core/v2_3_0/core_nwb_ophys.py | 42 +- .../core/v2_3_0/core_nwb_retinotopy.py | 64 +-- .../pydantic/core/v2_4_0/core_nwb_base.py | 24 +- .../pydantic/core/v2_4_0/core_nwb_behavior.py | 8 +- .../pydantic/core/v2_4_0/core_nwb_ecephys.py | 46 +- .../pydantic/core/v2_4_0/core_nwb_epoch.py | 8 +- .../pydantic/core/v2_4_0/core_nwb_file.py | 22 +- .../pydantic/core/v2_4_0/core_nwb_icephys.py | 80 ++-- .../pydantic/core/v2_4_0/core_nwb_image.py | 65 ++- .../pydantic/core/v2_4_0/core_nwb_misc.py | 44 +- .../pydantic/core/v2_4_0/core_nwb_ogen.py | 8 +- .../pydantic/core/v2_4_0/core_nwb_ophys.py | 41 +- .../core/v2_4_0/core_nwb_retinotopy.py | 64 +-- .../pydantic/core/v2_5_0/core_nwb_base.py | 26 +- 
.../pydantic/core/v2_5_0/core_nwb_behavior.py | 12 +- .../pydantic/core/v2_5_0/core_nwb_ecephys.py | 46 +- .../pydantic/core/v2_5_0/core_nwb_epoch.py | 4 +- .../pydantic/core/v2_5_0/core_nwb_file.py | 22 +- .../pydantic/core/v2_5_0/core_nwb_icephys.py | 80 ++-- .../pydantic/core/v2_5_0/core_nwb_image.py | 65 ++- .../pydantic/core/v2_5_0/core_nwb_misc.py | 44 +- .../pydantic/core/v2_5_0/core_nwb_ogen.py | 8 +- .../pydantic/core/v2_5_0/core_nwb_ophys.py | 41 +- .../core/v2_5_0/core_nwb_retinotopy.py | 64 +-- .../core/v2_6_0_alpha/core_nwb_base.py | 26 +- .../core/v2_6_0_alpha/core_nwb_behavior.py | 12 +- .../core/v2_6_0_alpha/core_nwb_ecephys.py | 46 +- .../core/v2_6_0_alpha/core_nwb_epoch.py | 4 +- .../core/v2_6_0_alpha/core_nwb_file.py | 22 +- .../core/v2_6_0_alpha/core_nwb_icephys.py | 80 ++-- .../core/v2_6_0_alpha/core_nwb_image.py | 65 ++- .../core/v2_6_0_alpha/core_nwb_misc.py | 44 +- .../core/v2_6_0_alpha/core_nwb_ogen.py | 8 +- .../core/v2_6_0_alpha/core_nwb_ophys.py | 64 ++- .../core/v2_6_0_alpha/core_nwb_retinotopy.py | 64 +-- .../pydantic/core/v2_7_0/core_nwb_base.py | 26 +- .../pydantic/core/v2_7_0/core_nwb_behavior.py | 12 +- .../pydantic/core/v2_7_0/core_nwb_ecephys.py | 46 +- .../pydantic/core/v2_7_0/core_nwb_epoch.py | 4 +- .../pydantic/core/v2_7_0/core_nwb_file.py | 22 +- .../pydantic/core/v2_7_0/core_nwb_icephys.py | 80 ++-- .../pydantic/core/v2_7_0/core_nwb_image.py | 65 ++- .../pydantic/core/v2_7_0/core_nwb_misc.py | 44 +- .../pydantic/core/v2_7_0/core_nwb_ogen.py | 9 +- .../pydantic/core/v2_7_0/core_nwb_ophys.py | 64 ++- .../core/v2_7_0/core_nwb_retinotopy.py | 64 +-- .../hdmf_common/v1_1_0/hdmf_common_table.py | 38 +- .../hdmf_common/v1_1_2/hdmf_common_table.py | 38 +- .../hdmf_common/v1_1_3/hdmf_common_table.py | 38 +- .../pydantic/hdmf_common/v1_2_0/__init__.py | 1 - .../hdmf_common/v1_2_0/hdmf_common_base.py | 88 ---- .../hdmf_common/v1_2_0/hdmf_common_sparse.py | 125 ----- .../hdmf_common/v1_2_0/hdmf_common_table.py | 449 ----------------- .../pydantic/hdmf_common/v1_2_0/namespace.py | 83 ---- .../pydantic/hdmf_common/v1_2_1/__init__.py | 1 - .../hdmf_common/v1_2_1/hdmf_common_base.py | 104 ---- .../hdmf_common/v1_2_1/hdmf_common_sparse.py | 126 ----- .../hdmf_common/v1_2_1/hdmf_common_table.py | 449 ----------------- .../pydantic/hdmf_common/v1_2_1/namespace.py | 83 ---- .../pydantic/hdmf_common/v1_3_0/__init__.py | 1 - .../hdmf_common/v1_3_0/hdmf_common_base.py | 104 ---- .../v1_3_0/hdmf_common_resources.py | 181 ------- .../hdmf_common/v1_3_0/hdmf_common_sparse.py | 110 ----- .../hdmf_common/v1_3_0/hdmf_common_table.py | 449 ----------------- .../pydantic/hdmf_common/v1_3_0/namespace.py | 86 ---- .../pydantic/hdmf_common/v1_4_0/__init__.py | 1 - .../hdmf_common/v1_4_0/hdmf_common_base.py | 104 ---- .../hdmf_common/v1_4_0/hdmf_common_sparse.py | 110 ----- .../hdmf_common/v1_4_0/hdmf_common_table.py | 422 ---------------- .../pydantic/hdmf_common/v1_4_0/namespace.py | 77 --- .../hdmf_common/v1_5_0/hdmf_common_sparse.py | 6 +- .../hdmf_common/v1_5_0/hdmf_common_table.py | 38 +- .../pydantic/hdmf_common/v1_5_1/__init__.py | 1 - .../hdmf_common/v1_5_1/hdmf_common_base.py | 104 ---- .../hdmf_common/v1_5_1/hdmf_common_sparse.py | 110 ----- .../hdmf_common/v1_5_1/hdmf_common_table.py | 453 ------------------ .../pydantic/hdmf_common/v1_5_1/namespace.py | 78 --- .../pydantic/hdmf_common/v1_6_0/__init__.py | 1 - .../hdmf_common/v1_6_0/hdmf_common_base.py | 104 ---- .../hdmf_common/v1_6_0/hdmf_common_sparse.py | 110 ----- .../hdmf_common/v1_6_0/hdmf_common_table.py | 453 
------------------ .../pydantic/hdmf_common/v1_6_0/namespace.py | 78 --- .../pydantic/hdmf_common/v1_7_0/__init__.py | 1 - .../hdmf_common/v1_7_0/hdmf_common_base.py | 104 ---- .../hdmf_common/v1_7_0/hdmf_common_sparse.py | 110 ----- .../hdmf_common/v1_7_0/hdmf_common_table.py | 453 ------------------ .../pydantic/hdmf_common/v1_7_0/namespace.py | 78 --- .../hdmf_common/v1_8_0/hdmf_common_sparse.py | 6 +- .../hdmf_common/v1_8_0/hdmf_common_table.py | 38 +- .../v0_1_0/hdmf_experimental_experimental.py | 4 +- .../v0_1_0/hdmf_experimental_resources.py | 12 +- .../hdmf_experimental/v0_1_0/namespace.py | 7 +- .../hdmf_experimental/v0_2_0/__init__.py | 1 - .../v0_2_0/hdmf_experimental_experimental.py | 93 ---- .../v0_2_0/hdmf_experimental_resources.py | 199 -------- .../hdmf_experimental/v0_2_0/namespace.py | 89 ---- .../hdmf_experimental/v0_3_0/__init__.py | 1 - .../v0_3_0/hdmf_experimental_experimental.py | 93 ---- .../v0_3_0/hdmf_experimental_resources.py | 207 -------- .../hdmf_experimental/v0_3_0/namespace.py | 89 ---- .../hdmf_experimental/v0_4_0/__init__.py | 1 - .../v0_4_0/hdmf_experimental_experimental.py | 93 ---- .../v0_4_0/hdmf_experimental_resources.py | 229 --------- .../hdmf_experimental/v0_4_0/namespace.py | 90 ---- .../v0_5_0/hdmf_experimental_resources.py | 14 +- .../linkml/core/v2_2_0/core.nwb.language.yaml | 15 - .../linkml/core/v2_2_1/core.nwb.language.yaml | 15 - .../linkml/core/v2_2_2/core.nwb.language.yaml | 15 - .../linkml/core/v2_2_4/core.nwb.language.yaml | 15 - .../linkml/core/v2_2_5/core.nwb.language.yaml | 15 - .../linkml/core/v2_3_0/core.nwb.language.yaml | 15 - .../linkml/core/v2_4_0/core.nwb.language.yaml | 15 - .../linkml/core/v2_5_0/core.nwb.language.yaml | 15 - .../core/v2_6_0_alpha/core.nwb.language.yaml | 15 - .../linkml/core/v2_7_0/core.nwb.language.yaml | 15 - .../v1_1_0/hdmf-common.nwb.language.yaml | 17 +- .../v1_1_2/hdmf-common.nwb.language.yaml | 17 +- .../v1_1_3/hdmf-common.nwb.language.yaml | 17 +- .../hdmf_common/v1_2_0/hdmf-common.base.yaml | 33 -- .../v1_2_0/hdmf-common.nwb.language.yaml | 109 ----- .../v1_2_0/hdmf-common.sparse.yaml | 75 --- .../hdmf_common/v1_2_0/hdmf-common.table.yaml | 181 ------- .../linkml/hdmf_common/v1_2_0/namespace.yaml | 17 - .../hdmf_common/v1_2_1/hdmf-common.base.yaml | 46 -- .../v1_2_1/hdmf-common.nwb.language.yaml | 109 ----- .../v1_2_1/hdmf-common.sparse.yaml | 77 --- .../hdmf_common/v1_2_1/hdmf-common.table.yaml | 181 ------- .../linkml/hdmf_common/v1_2_1/namespace.yaml | 17 - .../hdmf_common/v1_3_0/hdmf-common.base.yaml | 46 -- .../v1_3_0/hdmf-common.nwb.language.yaml | 109 ----- .../v1_3_0/hdmf-common.resources.yaml | 158 ------ .../v1_3_0/hdmf-common.sparse.yaml | 66 --- .../hdmf_common/v1_3_0/hdmf-common.table.yaml | 181 ------- .../linkml/hdmf_common/v1_3_0/namespace.yaml | 18 - .../hdmf_common/v1_4_0/hdmf-common.base.yaml | 46 -- .../v1_4_0/hdmf-common.nwb.language.yaml | 109 ----- .../v1_4_0/hdmf-common.sparse.yaml | 66 --- .../hdmf_common/v1_4_0/hdmf-common.table.yaml | 166 ------- .../linkml/hdmf_common/v1_4_0/namespace.yaml | 17 - .../v1_5_0/hdmf-common.nwb.language.yaml | 17 +- .../hdmf_common/v1_5_1/hdmf-common.base.yaml | 46 -- .../v1_5_1/hdmf-common.nwb.language.yaml | 109 ----- .../v1_5_1/hdmf-common.sparse.yaml | 66 --- .../hdmf_common/v1_5_1/hdmf-common.table.yaml | 185 ------- .../linkml/hdmf_common/v1_5_1/namespace.yaml | 17 - .../hdmf_common/v1_6_0/hdmf-common.base.yaml | 46 -- .../v1_6_0/hdmf-common.nwb.language.yaml | 109 ----- .../v1_6_0/hdmf-common.sparse.yaml | 66 --- 
.../hdmf_common/v1_6_0/hdmf-common.table.yaml | 185 ------- .../linkml/hdmf_common/v1_6_0/namespace.yaml | 17 - .../hdmf_common/v1_7_0/hdmf-common.base.yaml | 46 -- .../v1_7_0/hdmf-common.nwb.language.yaml | 109 ----- .../v1_7_0/hdmf-common.sparse.yaml | 66 --- .../hdmf_common/v1_7_0/hdmf-common.table.yaml | 185 ------- .../linkml/hdmf_common/v1_7_0/namespace.yaml | 17 - .../v1_8_0/hdmf-common.nwb.language.yaml | 17 +- .../hdmf-experimental.experimental.yaml | 2 +- .../hdmf-experimental.nwb.language.yaml | 17 +- .../v0_1_0/hdmf-experimental.resources.yaml | 2 +- .../hdmf-experimental.experimental.yaml | 31 -- .../hdmf-experimental.nwb.language.yaml | 109 ----- .../v0_2_0/hdmf-experimental.resources.yaml | 196 -------- .../hdmf_experimental/v0_2_0/namespace.yaml | 17 - .../hdmf-experimental.experimental.yaml | 31 -- .../hdmf-experimental.nwb.language.yaml | 109 ----- .../v0_3_0/hdmf-experimental.resources.yaml | 199 -------- .../hdmf_experimental/v0_3_0/namespace.yaml | 17 - .../hdmf-experimental.experimental.yaml | 31 -- .../hdmf-experimental.nwb.language.yaml | 109 ----- .../v0_4_0/hdmf-experimental.resources.yaml | 222 --------- .../hdmf_experimental/v0_4_0/namespace.yaml | 17 - .../hdmf-experimental.nwb.language.yaml | 17 +- 238 files changed, 2056 insertions(+), 13337 deletions(-) delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py 
delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py delete mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml delete mode 100644 
nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml delete mode 100644 
nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml delete mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py index 59f7d4a..d3d8395 100644 --- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py +++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py @@ -38,7 +38,6 @@ class NWBPydanticGenerator(PydanticGenerator): Subclass of pydantic generator, custom behavior is in overridden lifecycle methods :) """ - injected_fields: List[str] = ( ( 'hdf5_path: Optional[str] = Field(None, description="The absolute path that this object' diff --git a/nwb_linkml/src/nwb_linkml/lang_elements.py b/nwb_linkml/src/nwb_linkml/lang_elements.py index 4ebd6d4..c199062 100644 --- a/nwb_linkml/src/nwb_linkml/lang_elements.py +++ b/nwb_linkml/src/nwb_linkml/lang_elements.py @@ -33,7 +33,9 @@ def _make_dtypes() -> List[TypeDefinition]: # repr_string = f"np.{np_type.__name__}" if np_type.__module__ == "numpy" else None atype = TypeDefinition( - name=nwbtype, minimum_value=amin, typeof=linkmltype, # repr=repr_string + name=nwbtype, + minimum_value=amin, + typeof=linkmltype, # repr=repr_string ) DTypeTypes.append(atype) return DTypeTypes diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py index 4bc6c35..17d7363 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py @@ -83,15 +83,15 @@ class Image(NWBData): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -143,12 +143,12 @@ class TimeSeries(NWBDataInterface): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -177,11 +177,11 @@ class TimeSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - conversion: Optional[np.float32] = Field( + conversion: Optional[float] = Field( None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", ) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", ) @@ -212,11 +212,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""") + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") unit: Optional[str] = Field( None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" ) - value: np.float64 = Field(...) + value: float = Field(...) class TimeSeriesSync(ConfiguredBaseModel): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py index e574c6b..c258f24 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py @@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -133,8 +133,8 @@ class SpatialSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py index 74b8d44..22d4a40 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py @@ -108,9 +108,9 @@ class ElectricalSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_channels"], np.number], - NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], ] = Field(..., description="""Recorded voltage data.""") electrodes: Named[DynamicTableRegion] = Field( ..., @@ -119,7 +119,7 @@ class ElectricalSeries(TimeSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -133,12 +133,12 @@ class ElectricalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -167,10 +167,10 @@ class SpikeEventSeries(ElectricalSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_events, * num_samples"], np.number], - NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_events, * num_samples"], float], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], ] = Field(..., description="""Spike waveforms.""") - timestamps: NDArray[Shape["* num_times"], np.float64] = Field( + timestamps: NDArray[Shape["* num_times"], float] = Field( ..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -182,7 +182,7 @@ class SpikeEventSeries(ElectricalSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -196,7 +196,7 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -232,7 +232,7 @@ class FeatureExtraction(NWBDataInterface): description="""Description of features (eg, ''PC1'') for each of the extracted features.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field( + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( ..., description="""Multi-dimensional array of features extracted from each event.""", json_schema_extra={ @@ -247,7 +247,7 @@ class FeatureExtraction(NWBDataInterface): } }, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of events that features correspond to (can be a link).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -277,12 +277,12 @@ class EventDetection(NWBDataInterface): ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", ) - source_idx: NDArray[Shape["* num_events"], np.int32] = Field( + source_idx: NDArray[Shape["* num_events"], int] = Field( ..., description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). 
The index points to each event from the raw data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -367,9 +367,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel): "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"} }, ) - x: Optional[np.float32] = Field(None, description="""x coordinate""") - y: Optional[np.float32] = Field(None, description="""y coordinate""") - z: Optional[np.float32] = Field(None, description="""z coordinate""") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") class ClusterWaveforms(NWBDataInterface): @@ -388,7 +388,7 @@ class ClusterWaveforms(NWBDataInterface): waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) - waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""", json_schema_extra={ @@ -397,7 +397,7 @@ class ClusterWaveforms(NWBDataInterface): } }, ) - waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""Stdev of waveforms for each cluster, using the same indices as in mean""", json_schema_extra={ @@ -424,17 +424,17 @@ class Clustering(NWBDataInterface): ..., description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""", ) - num: NDArray[Shape["* num_events"], np.int32] = Field( + num: NDArray[Shape["* num_events"], int] = Field( ..., description="""Cluster number of each event""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field( + peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field( ..., description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of clustered events, in seconds. 
This may be a link to times field in associated FeatureExtraction module.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py index d802c37..1cf2bb7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py @@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) - start_time: NDArray[Any, np.float32] = Field( + start_time: NDArray[Any, float] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, np.float32] = Field( + stop_time: NDArray[Any, float] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -173,11 +173,11 @@ class TimeIntervalsTimeseries(VectorData): "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"} }, ) - idx_start: Optional[np.int32] = Field( + idx_start: Optional[int] = Field( None, description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", ) - count: Optional[np.int32] = Field( + count: Optional[int] = Field( None, description="""Number of data samples available in this time series, during this epoch.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py index cf1adbd..397abf7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py @@ -102,7 +102,7 @@ class NWBFile(NWBContainer): None, description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""", ) - file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field( + file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""", json_schema_extra={ @@ -116,11 +116,11 @@ class NWBFile(NWBContainer): session_description: str = Field( ..., description="""A description of the experimental session and data in the file.""" ) - session_start_time: np.datetime64 = Field( + session_start_time: datetime = Field( ..., description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""", ) - timestamps_reference_time: np.datetime64 = Field( + timestamps_reference_time: datetime = Field( ..., description="""Date and time corresponding to time zero of all timestamps. 
The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""", ) @@ -335,7 +335,7 @@ class Subject(NWBContainer): age: Optional[str] = Field( None, description="""Age of subject. Can be supplied instead of 'date_of_birth'.""" ) - date_of_birth: Optional[np.datetime64] = Field( + date_of_birth: Optional[datetime] = Field( None, description="""Date of birth of subject. Can be supplied instead of 'age'.""" ) description: Optional[str] = Field( @@ -394,7 +394,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: NDArray[Any, np.float32] = Field( + x: NDArray[Any, float] = Field( ..., description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -403,7 +403,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - y: NDArray[Any, np.float32] = Field( + y: NDArray[Any, float] = Field( ..., description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -412,7 +412,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - z: NDArray[Any, np.float32] = Field( + z: NDArray[Any, float] = Field( ..., description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -421,7 +421,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: NDArray[Any, np.float32] = Field( + imp: NDArray[Any, float] = Field( ..., description="""Impedance of the channel.""", json_schema_extra={ @@ -439,7 +439,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: NDArray[Any, np.float32] = Field( + filtering: NDArray[Any, float] = Field( ..., description="""Description of hardware filtering.""", json_schema_extra={ @@ -460,7 +460,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, np.float32]] = Field( + rel_x: Optional[NDArray[Any, float]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -469,7 +469,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, np.float32]] = Field( + rel_y: Optional[NDArray[Any, float]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -478,7 +478,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, np.float32]] = Field( + rel_z: Optional[NDArray[Any, float]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py index 7eb4679..8d6da27 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py @@ -109,11 +109,11 @@ class PatchClampSeries(TimeSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to 
group different PatchClampSeries together.""" ) data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""") - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -126,12 +126,12 @@ class PatchClampSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -164,7 +164,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -180,18 +180,18 @@ class CurrentClampSeries(PatchClampSeries): name: str = Field(...) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""") - capacitance_compensation: Optional[np.float32] = Field( + bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") + bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -204,12 +204,12 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -255,21 +255,19 @@ class IZeroClampSeries(CurrentClampSeries): ) name: str = Field(...) - bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: np.float32 = Field( - ..., description="""Bridge balance, in ohms, fixed to 0.0.""" - ) - capacitance_compensation: np.float32 = Field( + bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") + bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -282,12 +280,12 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -319,10 +317,10 @@ class CurrentClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -335,12 +333,12 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -411,10 +409,10 @@ class VoltageClampSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -427,12 +425,12 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -488,7 +486,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): @@ -511,7 +509,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): @@ -534,7 +532,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): @@ -557,7 +555,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): @@ -580,7 +578,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): @@ -603,7 +601,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): @@ -626,7 +624,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampStimulusSeries(PatchClampSeries): @@ -643,10 +641,10 @@ class VoltageClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -659,12 +657,12 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -738,7 +736,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, np.uint32] = Field( + sweep_number: NDArray[Any, int] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py index fa26d72..db8401b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py @@ -71,15 +71,15 @@ class GrayscaleImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -94,15 +94,15 @@ class RGBImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -117,15 +117,15 @@ class RGBAImage(Image): ) name: str = Field(...) 
- resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -142,11 +142,11 @@ class ImageSeries(TimeSeries): name: str = Field(...) data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -168,12 +168,12 @@ class ImageSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -204,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[np.int32] = Field( + starting_frame: Optional[int] = Field( None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. 
If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) @@ -225,11 +225,11 @@ class ImageMaskSeries(ImageSeries): name: str = Field(...) data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -251,12 +251,12 @@ class ImageMaskSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -284,13 +284,12 @@ class OpticalSeries(ImageSeries): ) name: str = Field(...) - distance: Optional[np.float32] = Field( + distance: Optional[float] = Field( None, description="""Distance from camera/monitor to target/eye.""" ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") orientation: Optional[str] = Field( @@ -299,11 +298,11 @@ class OpticalSeries(ImageSeries): ) data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -325,12 +324,12 @@ class OpticalSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -358,7 +357,7 @@ class IndexSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int32] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the frame in the referenced ImageSeries.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -372,12 +371,12 @@ class IndexSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py index 58ceb2e..968660d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py @@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) @@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int8] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -296,12 +296,12 @@ class DecompositionSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -334,7 +334,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -370,7 +370,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field( + band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( ..., description="""Low and high limit of each band in Hz. 
If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -384,12 +384,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], np.float32] = Field( + band_mean: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field( + band_stdev: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -441,7 +441,7 @@ class Units(DynamicTable): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field( + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( None, description="""Observation intervals for each unit.""", json_schema_extra={ @@ -474,14 +474,14 @@ class Units(DynamicTable): ) waveform_mean: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") waveform_sd: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") colnames: Optional[str] = Field( @@ -517,7 +517,7 @@ class UnitsSpikeTimes(VectorData): "linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"} }, ) - resolution: Optional[np.float64] = Field( + resolution: Optional[float] = Field( None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py index cd3c8ac..02043ad 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py @@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.number] = Field( + data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer): name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py index 9ae5919..44b5845 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py @@ -109,24 +109,21 @@ class TwoPhotonSeries(ImageSeries): ) name: str = Field(...) - pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""") - scan_line_rate: Optional[np.float32] = Field( + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field( None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", ) field_of_view: Optional[ - Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height"], np.float32], - ] + Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -148,12 +145,12 @@ class TwoPhotonSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -182,8 +179,7 @@ class RoiResponseSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_rois"], np.number], + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] ] = Field(..., description="""Signals from ROIs.""") rois: Named[DynamicTableRegion] = Field( ..., @@ -201,12 +197,12 @@ class RoiResponseSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -280,8 +276,8 @@ class ImagingPlane(NWBContainer): name: str = Field(...) description: Optional[str] = Field(None, description="""Description of the imaging plane.""") - excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""") - imaging_rate: np.float32 = Field(..., description="""Rate that images are acquired, in Hz.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + imaging_rate: float = Field(..., description="""Rate that images are acquired, in Hz.""") indicator: str = Field(..., description="""Calcium indicator.""") location: str = Field( ..., @@ -321,7 +317,7 @@ class ImagingPlaneManifold(ConfiguredBaseModel): "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"} }, ) - conversion: Optional[np.float32] = Field( + conversion: Optional[float] = Field( None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. 
If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", ) @@ -331,8 +327,8 @@ class ImagingPlaneManifold(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* height, * width, 3 x_y_z"], np.float32], - NDArray[Shape["* height, * width, * depth, 3 x_y_z"], np.float32], + NDArray[Shape["* height, * width, 3 x_y_z"], float], + NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float], ] ] = Field(None) @@ -353,7 +349,7 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Measurement units for origin_coords. The default value is 'meters'.""" ) - array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], np.float32]] = Field( + array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -384,7 +380,7 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" ) - array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], np.float32]] = Field( + array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -408,9 +404,7 @@ class OpticalChannel(NWBContainer): name: str = Field(...) description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: np.float32 = Field( - ..., description="""Emission wavelength for channel, in nm.""" - ) + emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") class MotionCorrection(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py index ee2356f..7155a0fc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py @@ -96,14 +96,12 @@ class RetinotopyMap(NWBData): ) name: str = Field(...) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -124,19 +122,17 @@ class AxisMap(RetinotopyMap): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") class RetinotopyImage(GrayscaleImage): @@ -149,29 +145,27 @@ class RetinotopyImage(GrayscaleImage): ) name: str = Field(...) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -262,32 +256,28 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage): } }, ) - focal_depth: Optional[np.float32] = Field( - None, description="""Focal depth offset, in meters.""" - ) - bits_per_pixel: Optional[np.int32] = Field( + focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py index df1e7e7..c37673c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py @@ -83,15 +83,15 @@ class Image(NWBData): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -143,12 +143,12 @@ class TimeSeries(NWBDataInterface): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -177,11 +177,11 @@ class TimeSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - conversion: Optional[np.float32] = Field( + conversion: Optional[float] = Field( None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. 
If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", ) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", ) @@ -212,11 +212,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""") + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") unit: Optional[str] = Field( None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" ) - value: np.float64 = Field(...) + value: float = Field(...) class TimeSeriesSync(ConfiguredBaseModel): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py index 143813d..5020dfe 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py @@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -133,8 +133,8 @@ class SpatialSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py index c098784..d8ed535 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py @@ -108,9 +108,9 @@ class ElectricalSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_channels"], np.number], - NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], ] = Field(..., description="""Recorded voltage data.""") electrodes: Named[DynamicTableRegion] = Field( ..., @@ -119,7 +119,7 @@ class ElectricalSeries(TimeSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -133,12 +133,12 @@ class ElectricalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
 If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -167,10 +167,10 @@ class SpikeEventSeries(ElectricalSeries):
     name: str = Field(...)
     data: Union[
-        NDArray[Shape["* num_events, * num_samples"], np.number],
-        NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number],
+        NDArray[Shape["* num_events, * num_samples"], float],
+        NDArray[Shape["* num_events, * num_channels, * num_samples"], float],
     ] = Field(..., description="""Spike waveforms.""")
-    timestamps: NDArray[Shape["* num_times"], np.float64] = Field(
+    timestamps: NDArray[Shape["* num_times"], float] = Field(
         ...,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -182,7 +182,7 @@ class SpikeEventSeries(ElectricalSeries):
             "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
         },
     )
-    channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+    channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
         None,
         description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@@ -196,7 +196,7 @@ class SpikeEventSeries(ElectricalSeries):
         None,
         description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values.
 If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,7 +232,7 @@ class FeatureExtraction(NWBDataInterface):
         description="""Description of features (eg, ''PC1'') for each of the extracted features.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}},
     )
-    features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field(
+    features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field(
         ...,
         description="""Multi-dimensional array of features extracted from each event.""",
         json_schema_extra={
@@ -247,7 +247,7 @@ class FeatureExtraction(NWBDataInterface):
             }
         },
     )
-    times: NDArray[Shape["* num_events"], np.float64] = Field(
+    times: NDArray[Shape["* num_events"], float] = Field(
         ...,
         description="""Times of events that features correspond to (can be a link).""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@@ -277,12 +277,12 @@ class EventDetection(NWBDataInterface):
         ...,
         description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""",
     )
-    source_idx: NDArray[Shape["* num_events"], np.int32] = Field(
+    source_idx: NDArray[Shape["* num_events"], int] = Field(
         ...,
         description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
     )
-    times: NDArray[Shape["* num_events"], np.float64] = Field(
+    times: NDArray[Shape["* num_events"], float] = Field(
         ...,
         description="""Timestamps of events, in seconds.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
@@ -367,9 +367,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
         },
     )
-    x: Optional[np.float32] = Field(None, description="""x coordinate""")
-    y: Optional[np.float32] = Field(None, description="""y coordinate""")
-    z: Optional[np.float32] = Field(None, description="""z coordinate""")
+    x: Optional[float] = Field(None, description="""x coordinate""")
+    y: Optional[float] = Field(None, description="""y coordinate""")
+    z: Optional[float] = Field(None, description="""z coordinate""")
 
 
 class ClusterWaveforms(NWBDataInterface):
@@ -388,7 +388,7 @@ class ClusterWaveforms(NWBDataInterface):
     waveform_filtering: str = Field(
         ..., description="""Filtering applied to data before generating mean/sd"""
     )
-    waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+    waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
         ...,
         description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]).
 Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""",
         json_schema_extra={
@@ -397,7 +397,7 @@ class ClusterWaveforms(NWBDataInterface):
             }
         },
     )
-    waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field(
+    waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field(
         ...,
         description="""Stdev of waveforms for each cluster, using the same indices as in mean""",
         json_schema_extra={
@@ -424,17 +424,17 @@ class Clustering(NWBDataInterface):
         ...,
         description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""",
     )
-    num: NDArray[Shape["* num_events"], np.int32] = Field(
+    num: NDArray[Shape["* num_events"], int] = Field(
         ...,
         description="""Cluster number of each event""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
     )
-    peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field(
+    peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field(
         ...,
         description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}},
     )
-    times: NDArray[Shape["* num_events"], np.float64] = Field(
+    times: NDArray[Shape["* num_events"], float] = Field(
         ...,
         description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py
index ad01523..0113b4d 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py
@@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable):
     )
 
     name: str = Field(...)
-    start_time: NDArray[Any, np.float32] = Field(
+    start_time: NDArray[Any, float] = Field(
         ...,
         description="""Start time of epoch, in seconds.""",
         json_schema_extra={
@@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable):
             }
         },
     )
-    stop_time: NDArray[Any, np.float32] = Field(
+    stop_time: NDArray[Any, float] = Field(
         ...,
         description="""Stop time of epoch, in seconds.""",
         json_schema_extra={
@@ -173,11 +173,11 @@ class TimeIntervalsTimeseries(VectorData):
             "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
         },
     )
-    idx_start: Optional[np.int32] = Field(
+    idx_start: Optional[int] = Field(
         None,
         description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
     )
-    count: Optional[np.int32] = Field(
+    count: Optional[int] = Field(
         None,
         description="""Number of data samples available in this time series, during this epoch.""",
     )
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py
index 3faf47c..c9f7dcd 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py
@@ -102,7 +102,7 @@ class NWBFile(NWBContainer):
         None,
         description="""File version string. Use semantic versioning, e.g. 1.2.1.
 This will be the name of the format with trailing major, minor and patch numbers.""",
     )
-    file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field(
+    file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field(
         ...,
         description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""",
         json_schema_extra={
@@ -116,11 +116,11 @@ class NWBFile(NWBContainer):
     session_description: str = Field(
         ..., description="""A description of the experimental session and data in the file."""
     )
-    session_start_time: np.datetime64 = Field(
+    session_start_time: datetime = Field(
         ...,
         description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""",
     )
-    timestamps_reference_time: np.datetime64 = Field(
+    timestamps_reference_time: datetime = Field(
         ...,
         description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""",
     )
@@ -335,7 +335,7 @@ class Subject(NWBContainer):
     age: Optional[str] = Field(
         None, description="""Age of subject. Can be supplied instead of 'date_of_birth'."""
     )
-    date_of_birth: Optional[np.datetime64] = Field(
+    date_of_birth: Optional[datetime] = Field(
         None,
         description="""Date of birth of subject.
 Can be supplied instead of 'age'.""",
     )
     description: Optional[str] = Field(
@@ -394,7 +394,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
             "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"}
         },
     )
-    x: NDArray[Any, np.float32] = Field(
+    x: NDArray[Any, float] = Field(
         ...,
         description="""x coordinate of the channel location in the brain (+x is posterior).""",
         json_schema_extra={
@@ -403,7 +403,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
             }
         },
     )
-    y: NDArray[Any, np.float32] = Field(
+    y: NDArray[Any, float] = Field(
         ...,
         description="""y coordinate of the channel location in the brain (+y is inferior).""",
         json_schema_extra={
@@ -412,7 +412,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
             }
         },
     )
-    z: NDArray[Any, np.float32] = Field(
+    z: NDArray[Any, float] = Field(
         ...,
         description="""z coordinate of the channel location in the brain (+z is right).""",
         json_schema_extra={
@@ -421,7 +421,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
             }
         },
     )
-    imp: NDArray[Any, np.float32] = Field(
+    imp: NDArray[Any, float] = Field(
         ...,
         description="""Impedance of the channel.""",
         json_schema_extra={
@@ -439,7 +439,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
             }
         },
     )
-    filtering: NDArray[Any, np.float32] = Field(
+    filtering: NDArray[Any, float] = Field(
         ...,
         description="""Description of hardware filtering.""",
         json_schema_extra={
@@ -460,7 +460,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
             }
         },
     )
-    rel_x: Optional[NDArray[Any, np.float32]] = Field(
+    rel_x: Optional[NDArray[Any, float]] = Field(
         None,
         description="""x coordinate in electrode group""",
         json_schema_extra={
@@ -469,7 +469,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
             }
         },
     )
-    rel_y: Optional[NDArray[Any, np.float32]] = Field(
+    rel_y: Optional[NDArray[Any, float]] = Field(
         None,
         description="""y coordinate in electrode group""",
         json_schema_extra={
@@ -478,7 +478,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable):
             }
         },
     )
-    rel_z: Optional[NDArray[Any, np.float32]] = Field(
+    rel_z: Optional[NDArray[Any, float]] = Field(
         None,
         description="""z coordinate in electrode group""",
         json_schema_extra={
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py
index 3b96bf5..3cea17f 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py
@@ -109,11 +109,11 @@ class PatchClampSeries(TimeSeries):
     stimulus_description: Optional[str] = Field(
         None, description="""Protocol/stimulus name for this patch-clamp dataset."""
     )
-    sweep_number: Optional[np.uint32] = Field(
+    sweep_number: Optional[int] = Field(
         None, description="""Sweep number, allows to group different PatchClampSeries together."""
     )
     data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""")
-    gain: Optional[np.float32] = Field(
+    gain: Optional[float] = Field(
         None,
         description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
     )
@@ -126,12 +126,12 @@ class PatchClampSeries(TimeSeries):
         None,
         description="""Timestamp of the first sample in seconds.
 When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -164,7 +164,7 @@ class PatchClampSeriesData(ConfiguredBaseModel):
         None,
         description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
     )
-    array: Optional[NDArray[Shape["* num_times"], np.number]] = Field(
+    array: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}
     )
 
@@ -180,18 +180,18 @@ class CurrentClampSeries(PatchClampSeries):
 
     name: str = Field(...)
     data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
-    bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""")
-    bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""")
-    capacitance_compensation: Optional[np.float32] = Field(
+    bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""")
+    bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""")
+    capacitance_compensation: Optional[float] = Field(
         None, description="""Capacitance compensation, in farads."""
     )
     stimulus_description: Optional[str] = Field(
         None, description="""Protocol/stimulus name for this patch-clamp dataset."""
     )
-    sweep_number: Optional[np.uint32] = Field(
+    sweep_number: Optional[int] = Field(
         None, description="""Sweep number, allows to group different PatchClampSeries together."""
     )
-    gain: Optional[np.float32] = Field(
+    gain: Optional[float] = Field(
         None,
         description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
     )
@@ -204,12 +204,12 @@ class CurrentClampSeries(PatchClampSeries):
         None,
         description="""Timestamp of the first sample in seconds.
 When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -255,21 +255,19 @@ class IZeroClampSeries(CurrentClampSeries):
     )
 
     name: str = Field(...)
-    bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
-    bridge_balance: np.float32 = Field(
-        ..., description="""Bridge balance, in ohms, fixed to 0.0."""
-    )
-    capacitance_compensation: np.float32 = Field(
+    bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
+    bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
+    capacitance_compensation: float = Field(
         ..., description="""Capacitance compensation, in farads, fixed to 0.0."""
     )
     data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""")
     stimulus_description: Optional[str] = Field(
         None, description="""Protocol/stimulus name for this patch-clamp dataset."""
     )
-    sweep_number: Optional[np.uint32] = Field(
+    sweep_number: Optional[int] = Field(
         None, description="""Sweep number, allows to group different PatchClampSeries together."""
     )
-    gain: Optional[np.float32] = Field(
+    gain: Optional[float] = Field(
         None,
         description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
     )
@@ -282,12 +280,12 @@ class IZeroClampSeries(CurrentClampSeries):
         None,
         description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values.
 If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -319,10 +317,10 @@ class CurrentClampStimulusSeries(PatchClampSeries):
     stimulus_description: Optional[str] = Field(
         None, description="""Protocol/stimulus name for this patch-clamp dataset."""
     )
-    sweep_number: Optional[np.uint32] = Field(
+    sweep_number: Optional[int] = Field(
         None, description="""Sweep number, allows to group different PatchClampSeries together."""
     )
-    gain: Optional[np.float32] = Field(
+    gain: Optional[float] = Field(
         None,
         description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
     )
@@ -335,12 +333,12 @@ class CurrentClampStimulusSeries(PatchClampSeries):
         None,
         description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -411,10 +409,10 @@ class VoltageClampSeries(PatchClampSeries):
     stimulus_description: Optional[str] = Field(
         None, description="""Protocol/stimulus name for this patch-clamp dataset."""
     )
-    sweep_number: Optional[np.uint32] = Field(
+    sweep_number: Optional[int] = Field(
         None, description="""Sweep number, allows to group different PatchClampSeries together."""
     )
-    gain: Optional[np.float32] = Field(
+    gain: Optional[float] = Field(
         None,
         description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
     )
@@ -427,12 +425,12 @@ class VoltageClampSeries(PatchClampSeries):
         None,
         description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values.
 If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -488,7 +486,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
         None,
         description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
     )
-    value: np.float32 = Field(...)
+    value: float = Field(...)
 
 
 class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
@@ -511,7 +509,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
         None,
         description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
     )
-    value: np.float32 = Field(...)
+    value: float = Field(...)
 
 
 class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
@@ -534,7 +532,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
         None,
         description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""",
    )
-    value: np.float32 = Field(...)
+    value: float = Field(...)
 
 
 class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
@@ -557,7 +555,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel):
         None,
         description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""",
     )
-    value: np.float32 = Field(...)
+    value: float = Field(...)
 
 
 class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
@@ -580,7 +578,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel):
         None,
         description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""",
     )
-    value: np.float32 = Field(...)
+    value: float = Field(...)
 
 
 class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
@@ -603,7 +601,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel):
         None,
         description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""",
     )
-    value: np.float32 = Field(...)
+    value: float = Field(...)
 
 
 class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
@@ -626,7 +624,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel):
         None,
         description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""",
     )
-    value: np.float32 = Field(...)
+    value: float = Field(...)
 
 
 class VoltageClampStimulusSeries(PatchClampSeries):
@@ -643,10 +641,10 @@ class VoltageClampStimulusSeries(PatchClampSeries):
     stimulus_description: Optional[str] = Field(
         None, description="""Protocol/stimulus name for this patch-clamp dataset."""
     )
-    sweep_number: Optional[np.uint32] = Field(
+    sweep_number: Optional[int] = Field(
         None, description="""Sweep number, allows to group different PatchClampSeries together."""
    )
-    gain: Optional[np.float32] = Field(
+    gain: Optional[float] = Field(
         None,
         description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""",
     )
@@ -659,12 +657,12 @@ class VoltageClampStimulusSeries(PatchClampSeries):
         None,
         description="""Timestamp of the first sample in seconds.
 When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -738,7 +736,7 @@ class SweepTable(DynamicTable):
     )
 
     name: str = Field(...)
-    sweep_number: NDArray[Any, np.uint32] = Field(
+    sweep_number: NDArray[Any, int] = Field(
         ...,
         description="""Sweep number of the PatchClampSeries in that row.""",
         json_schema_extra={
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py
index f7b0d84..89e4978 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py
@@ -71,15 +71,15 @@ class GrayscaleImage(Image):
     )
 
     name: str = Field(...)
-    resolution: Optional[np.float32] = Field(
+    resolution: Optional[float] = Field(
         None, description="""Pixel resolution of the image, in pixels per centimeter."""
     )
     description: Optional[str] = Field(None, description="""Description of the image.""")
     array: Optional[
         Union[
-            NDArray[Shape["* x, * y"], np.number],
-            NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
-            NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+            NDArray[Shape["* x, * y"], float],
+            NDArray[Shape["* x, * y, 3 r_g_b"], float],
+            NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
         ]
     ] = Field(None)
 
@@ -94,15 +94,15 @@ class RGBImage(Image):
     )
 
     name: str = Field(...)
-    resolution: Optional[np.float32] = Field(
+    resolution: Optional[float] = Field(
         None, description="""Pixel resolution of the image, in pixels per centimeter."""
     )
     description: Optional[str] = Field(None, description="""Description of the image.""")
     array: Optional[
         Union[
-            NDArray[Shape["* x, * y"], np.number],
-            NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
-            NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+            NDArray[Shape["* x, * y"], float],
+            NDArray[Shape["* x, * y, 3 r_g_b"], float],
+            NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
         ]
     ] = Field(None)
 
@@ -117,15 +117,15 @@ class RGBAImage(Image):
     )
 
     name: str = Field(...)
-    resolution: Optional[np.float32] = Field(
+    resolution: Optional[float] = Field(
         None, description="""Pixel resolution of the image, in pixels per centimeter."""
     )
     description: Optional[str] = Field(None, description="""Description of the image.""")
     array: Optional[
         Union[
-            NDArray[Shape["* x, * y"], np.number],
-            NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
-            NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+            NDArray[Shape["* x, * y"], float],
+            NDArray[Shape["* x, * y, 3 r_g_b"], float],
+            NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
         ]
     ] = Field(None)
 
@@ -142,11 +142,11 @@ class ImageSeries(TimeSeries):
     name: str = Field(...)
     data: Optional[
         Union[
-            NDArray[Shape["* frame, * x, * y"], np.number],
-            NDArray[Shape["* frame, * x, * y, * z"], np.number],
+            NDArray[Shape["* frame, * x, * y"], float],
+            NDArray[Shape["* frame, * x, * y, * z"], float],
         ]
     ] = Field(None, description="""Binary data representing images across frames.""")
-    dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+    dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
         None,
         description="""Number of pixels on x, y, (and z) axes.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -168,12 +168,12 @@ class ImageSeries(TimeSeries):
         None,
         description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -204,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
         },
     )
-    starting_frame: Optional[np.int32] = Field(
+    starting_frame: Optional[int] = Field(
         None,
         description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15].
 If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
     )
@@ -225,11 +225,11 @@ class ImageMaskSeries(ImageSeries):
     name: str = Field(...)
     data: Optional[
         Union[
-            NDArray[Shape["* frame, * x, * y"], np.number],
-            NDArray[Shape["* frame, * x, * y, * z"], np.number],
+            NDArray[Shape["* frame, * x, * y"], float],
+            NDArray[Shape["* frame, * x, * y, * z"], float],
         ]
     ] = Field(None, description="""Binary data representing images across frames.""")
-    dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+    dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
         None,
         description="""Number of pixels on x, y, (and z) axes.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -251,12 +251,12 @@ class ImageMaskSeries(ImageSeries):
         None,
         description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -284,13 +284,12 @@ class OpticalSeries(ImageSeries):
     )
 
     name: str = Field(...)
-    distance: Optional[np.float32] = Field(
+    distance: Optional[float] = Field(
         None, description="""Distance from camera/monitor to target/eye."""
     )
     field_of_view: Optional[
         Union[
-            NDArray[Shape["2 width_height"], np.float32],
-            NDArray[Shape["3 width_height_depth"], np.float32],
+            NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float]
         ]
     ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
     orientation: Optional[str] = Field(
@@ -299,11 +298,11 @@ class OpticalSeries(ImageSeries):
     )
     data: Optional[
         Union[
-            NDArray[Shape["* frame, * x, * y"], np.number],
-            NDArray[Shape["* frame, * x, * y, * z"], np.number],
+            NDArray[Shape["* frame, * x, * y"], float],
+            NDArray[Shape["* frame, * x, * y, * z"], float],
         ]
     ] = Field(None, description="""Binary data representing images across frames.""")
-    dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+    dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
         None,
         description="""Number of pixels on x, y, (and z) axes.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -325,12 +324,12 @@ class OpticalSeries(ImageSeries):
         None,
         description="""Timestamp of the first sample in seconds.
 When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -358,7 +357,7 @@ class IndexSeries(TimeSeries):
     )
 
     name: str = Field(...)
-    data: NDArray[Shape["* num_times"], np.int32] = Field(
+    data: NDArray[Shape["* num_times"], int] = Field(
         ...,
         description="""Index of the frame in the referenced ImageSeries.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -372,12 +371,12 @@ class IndexSeries(TimeSeries):
         None,
         description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py
index 16f9bea..5d1881f 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py
@@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries):
         None,
         description="""Timestamp of the first sample in seconds.
 When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel):
     )
     array: Optional[
         Union[
-            NDArray[Shape["* num_times"], np.number],
-            NDArray[Shape["* num_times, * num_features"], np.number],
+            NDArray[Shape["* num_times"], float],
+            NDArray[Shape["* num_times, * num_features"], float],
         ]
     ] = Field(None)
 
@@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries):
         None,
         description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries):
     )
 
     name: str = Field(...)
-    data: NDArray[Shape["* num_times"], np.int8] = Field(
+    data: NDArray[Shape["* num_times"], int] = Field(
         ...,
         description="""Use values >0 if interval started, <0 if interval ended.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries):
         None,
         description="""Timestamp of the first sample in seconds.
 When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -296,12 +296,12 @@ class DecompositionSeries(TimeSeries):
         None,
         description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -334,7 +334,7 @@ class DecompositionSeriesData(ConfiguredBaseModel):
         None,
         description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
     )
-    array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field(
+    array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(
         None,
         json_schema_extra={
             "linkml_meta": {
@@ -370,7 +370,7 @@ class DecompositionSeriesBands(DynamicTable):
             }
         },
     )
-    band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field(
+    band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field(
         ...,
         description="""Low and high limit of each band in Hz.
 If it is a Gaussian filter, use 2 SD on either side of the center.""",
         json_schema_extra={
@@ -384,12 +384,12 @@ class DecompositionSeriesBands(DynamicTable):
             }
         },
     )
-    band_mean: NDArray[Shape["* num_bands"], np.float32] = Field(
+    band_mean: NDArray[Shape["* num_bands"], float] = Field(
         ...,
         description="""The mean Gaussian filters, in Hz.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
     )
-    band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field(
+    band_stdev: NDArray[Shape["* num_bands"], float] = Field(
         ...,
         description="""The standard deviation of Gaussian filters, in Hz.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}},
@@ -441,7 +441,7 @@ class Units(DynamicTable):
             "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
         },
     )
-    obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field(
+    obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field(
         None,
         description="""Observation intervals for each unit.""",
         json_schema_extra={
@@ -474,14 +474,14 @@ class Units(DynamicTable):
     )
     waveform_mean: Optional[
         Union[
-            NDArray[Shape["* num_units, * num_samples"], np.float32],
-            NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+            NDArray[Shape["* num_units, * num_samples"], float],
+            NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
         ]
     ] = Field(None, description="""Spike waveform mean for each spike unit.""")
     waveform_sd: Optional[
         Union[
-            NDArray[Shape["* num_units, * num_samples"], np.float32],
-            NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32],
+            NDArray[Shape["* num_units, * num_samples"], float],
+            NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float],
         ]
     ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""")
     colnames: Optional[str] = Field(
@@ -517,7 +517,7 @@ class UnitsSpikeTimes(VectorData):
             "linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"}
         },
     )
-    resolution: Optional[np.float64] = Field(
+    resolution: Optional[float] = Field(
         None,
         description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""",
     )
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py
index 7a99546..26c3fc8 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py
@@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries):
     )
 
     name: str = Field(...)
-    data: NDArray[Shape["* num_times"], np.number] = Field(
+    data: NDArray[Shape["* num_times"], float] = Field(
         ...,
         description="""Applied power for optogenetic stimulus, in watts.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries):
         None,
         description="""Timestamp of the first sample in seconds.
 When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer):
 
     name: str = Field(...)
     description: str = Field(..., description="""Description of stimulation site.""")
-    excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
+    excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
     location: str = Field(
         ...,
         description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""",
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py
index 6b4e114..2ed7469 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py
@@ -109,24 +109,21 @@ class TwoPhotonSeries(ImageSeries):
     )
 
     name: str = Field(...)
-    pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""")
-    scan_line_rate: Optional[np.float32] = Field(
+    pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""")
+    scan_line_rate: Optional[float] = Field(
         None,
         description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""",
     )
     field_of_view: Optional[
-        Union[
-            NDArray[Shape["2 width_height"], np.float32],
-            NDArray[Shape["3 width_height"], np.float32],
-        ]
+        Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]]
     ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""")
     data: Optional[
         Union[
-            NDArray[Shape["* frame, * x, * y"], np.number],
-            NDArray[Shape["* frame, * x, * y, * z"], np.number],
+            NDArray[Shape["* frame, * x, * y"], float],
+            NDArray[Shape["* frame, * x, * y, * z"], float],
         ]
     ] = Field(None, description="""Binary data representing images across frames.""")
-    dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field(
+    dimension: Optional[NDArray[Shape["* rank"], int]] = Field(
         None,
         description="""Number of pixels on x, y, (and z) axes.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}},
@@ -148,12 +145,12 @@ class TwoPhotonSeries(ImageSeries):
         None,
         description="""Timestamp of the first sample in seconds.
 When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -182,8 +179,8 @@ class RoiResponseSeries(TimeSeries):
 
     name: str = Field(...)
     data: Union[
-        NDArray[Shape["* num_times"], np.number],
-        NDArray[Shape["* num_times, * num_rois"], np.number],
+        NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float]
     ] = Field(..., description="""Signals from ROIs.""")
     rois: Named[DynamicTableRegion] = Field(
         ...,
@@ -201,12 +197,12 @@ class RoiResponseSeries(TimeSeries):
         None,
         description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -280,8 +276,8 @@ class ImagingPlane(NWBContainer):
 
     name: str = Field(...)
     description: Optional[str] = Field(None, description="""Description of the imaging plane.""")
-    excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""")
-    imaging_rate: np.float32 = Field(..., description="""Rate that images are acquired, in Hz.""")
+    excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""")
+    imaging_rate: float = Field(..., description="""Rate that images are acquired, in Hz.""")
     indicator: str = Field(..., description="""Calcium indicator.""")
     location: str = Field(
         ...,
@@ -321,7 +317,7 @@ class ImagingPlaneManifold(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"}
         },
     )
-    conversion: Optional[np.float32] = Field(
+    conversion: Optional[float] = Field(
         None,
         description="""Scalar to multiply each element in data to convert it to the specified 'unit'.
 If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""",
     )
     unit: Optional[str] = Field(
         None,
         description="""Base unit of measurement for working with the data. The default value is 'meters'.""",
     )
     array: Optional[
         Union[
-            NDArray[Shape["* height, * width, 3 x_y_z"], np.float32],
-            NDArray[Shape["* height, * width, * depth, 3 x_y_z"], np.float32],
+            NDArray[Shape["* height, * width, 3 x_y_z"], float],
+            NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float],
         ]
     ] = Field(None)
 
@@ -353,7 +349,7 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel):
     unit: Optional[str] = Field(
         None, description="""Measurement units for origin_coords. The default value is 'meters'."""
     )
-    array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], np.float32]] = Field(
+    array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field(
         None,
         json_schema_extra={
             "linkml_meta": {
@@ -384,7 +380,7 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel):
     unit: Optional[str] = Field(
         None, description="""Measurement units for grid_spacing. The default value is 'meters'."""
     )
-    array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], np.float32]] = Field(
+    array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field(
         None,
         json_schema_extra={
             "linkml_meta": {
@@ -408,9 +404,7 @@ class OpticalChannel(NWBContainer):
 
     name: str = Field(...)
     description: str = Field(..., description="""Description or other notes about the channel.""")
-    emission_lambda: np.float32 = Field(
-        ..., description="""Emission wavelength for channel, in nm."""
-    )
+    emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""")
 
 
 class MotionCorrection(NWBDataInterface):
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py
index 5fe79a1..7095d7e 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py
@@ -96,14 +96,12 @@ class RetinotopyMap(NWBData):
     )
 
     name: str = Field(...)
-    dimension: Optional[np.int32] = Field(
+    dimension: Optional[int] = Field(
         None,
         description="""Number of rows and columns in the image.
 NOTE: row, column representation is equivalent to height, width.""",
     )
-    field_of_view: Optional[np.float32] = Field(
-        None, description="""Size of viewing area, in meters."""
-    )
-    array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
+    array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
         None,
         json_schema_extra={
             "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
@@ -124,19 +122,17 @@ class AxisMap(RetinotopyMap):
     unit: Optional[str] = Field(
         None, description="""Unit that axis data is stored in (e.g., degrees)."""
     )
-    array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field(
+    array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(
         None,
         json_schema_extra={
             "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}}
         },
     )
-    dimension: Optional[np.int32] = Field(
+    dimension: Optional[int] = Field(
         None,
         description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
     )
-    field_of_view: Optional[np.float32] = Field(
-        None, description="""Size of viewing area, in meters."""
-    )
+    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
 
 
 class RetinotopyImage(GrayscaleImage):
@@ -149,29 +145,27 @@ class RetinotopyImage(GrayscaleImage):
     )
 
     name: str = Field(...)
-    bits_per_pixel: Optional[np.int32] = Field(
+    bits_per_pixel: Optional[int] = Field(
         None,
         description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
     )
-    dimension: Optional[np.int32] = Field(
+    dimension: Optional[int] = Field(
         None,
         description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""",
     )
-    field_of_view: Optional[np.float32] = Field(
-        None, description="""Size of viewing area, in meters."""
-    )
+    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
     format: Optional[str] = Field(
         None, description="""Format of image. Right now only 'raw' is supported."""
     )
-    resolution: Optional[np.float32] = Field(
+    resolution: Optional[float] = Field(
         None, description="""Pixel resolution of the image, in pixels per centimeter."""
     )
     description: Optional[str] = Field(None, description="""Description of the image.""")
     array: Optional[
         Union[
-            NDArray[Shape["* x, * y"], np.number],
-            NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
-            NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+            NDArray[Shape["* x, * y"], float],
+            NDArray[Shape["* x, * y, 3 r_g_b"], float],
+            NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
         ]
     ] = Field(None)
 
@@ -262,32 +256,28 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage):
             }
         },
     )
-    focal_depth: Optional[np.float32] = Field(
-        None, description="""Focal depth offset, in meters."""
-    )
-    bits_per_pixel: Optional[np.int32] = Field(
+    focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""")
+    bits_per_pixel: Optional[int] = Field(
         None,
         description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""",
     )
-    dimension: Optional[np.int32] = Field(
+    dimension: Optional[int] = Field(
         None,
         description="""Number of rows and columns in the image.
 NOTE: row, column representation is equivalent to height, width.""",
     )
-    field_of_view: Optional[np.float32] = Field(
-        None, description="""Size of viewing area, in meters."""
-    )
+    field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""")
     format: Optional[str] = Field(
         None, description="""Format of image. Right now only 'raw' is supported."""
     )
-    resolution: Optional[np.float32] = Field(
+    resolution: Optional[float] = Field(
         None, description="""Pixel resolution of the image, in pixels per centimeter."""
     )
     description: Optional[str] = Field(None, description="""Description of the image.""")
     array: Optional[
         Union[
-            NDArray[Shape["* x, * y"], np.number],
-            NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
-            NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+            NDArray[Shape["* x, * y"], float],
+            NDArray[Shape["* x, * y, 3 r_g_b"], float],
+            NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
         ]
     ] = Field(None)
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py
index 556fa21..0b34abd 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py
@@ -83,15 +83,15 @@ class Image(NWBData):
     )
 
     name: str = Field(...)
-    resolution: Optional[np.float32] = Field(
+    resolution: Optional[float] = Field(
         None, description="""Pixel resolution of the image, in pixels per centimeter."""
     )
     description: Optional[str] = Field(None, description="""Description of the image.""")
     array: Optional[
         Union[
-            NDArray[Shape["* x, * y"], np.number],
-            NDArray[Shape["* x, * y, 3 r_g_b"], np.number],
-            NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number],
+            NDArray[Shape["* x, * y"], float],
+            NDArray[Shape["* x, * y, 3 r_g_b"], float],
+            NDArray[Shape["* x, * y, 4 r_g_b_a"], float],
         ]
     ] = Field(None)
 
@@ -143,12 +143,12 @@ class TimeSeries(NWBDataInterface):
         None,
         description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -177,11 +177,11 @@ class TimeSeriesData(ConfiguredBaseModel):
         "data",
         json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
     )
-    conversion: Optional[np.float32] = Field(
+    conversion: Optional[float] = Field(
         None,
         description="""Scalar to multiply each element in data to convert it to the specified 'unit'.
 If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
     )
-    resolution: Optional[np.float32] = Field(
+    resolution: Optional[float] = Field(
         None,
         description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
     )
@@ -212,11 +212,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
         },
     )
-    rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""")
+    rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
     unit: Optional[str] = Field(
         None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
     )
-    value: np.float64 = Field(...)
+    value: float = Field(...)
 
 
 class TimeSeriesSync(ConfiguredBaseModel):
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py
index 39a00bd..3b1a5fb 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py
@@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries):
         None,
         description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values.
 If present, the length of this array should be the same size as the first dimension of data.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
@@ -133,8 +133,8 @@ class SpatialSeriesData(ConfiguredBaseModel):
     )
     array: Optional[
         Union[
-            NDArray[Shape["* num_times"], np.number],
-            NDArray[Shape["* num_times, * num_features"], np.number],
+            NDArray[Shape["* num_times"], float],
+            NDArray[Shape["* num_times, * num_features"], float],
         ]
     ] = Field(None)
 
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py
index 08e515f..ac58cf4 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py
@@ -108,9 +108,9 @@ class ElectricalSeries(TimeSeries):
 
     name: str = Field(...)
     data: Union[
-        NDArray[Shape["* num_times"], np.number],
-        NDArray[Shape["* num_times, * num_channels"], np.number],
-        NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number],
+        NDArray[Shape["* num_times"], float],
+        NDArray[Shape["* num_times, * num_channels"], float],
+        NDArray[Shape["* num_times, * num_channels, * num_samples"], float],
     ] = Field(..., description="""Recorded voltage data.""")
     electrodes: Named[DynamicTableRegion] = Field(
         ...,
@@ -119,7 +119,7 @@ class ElectricalSeries(TimeSeries):
             "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}}
         },
     )
-    channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field(
+    channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field(
         None,
         description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}},
@@ -133,12 +133,12 @@ class ElectricalSeries(TimeSeries):
         None,
         description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""",
     )
-    timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field(
+    timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
         description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""",
         json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}},
     )
-    control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field(
+    control: Optional[NDArray[Shape["* num_times"], int]] = Field(
         None,
         description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values.
If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -167,10 +167,10 @@ class SpikeEventSeries(ElectricalSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_events, * num_samples"], np.number], - NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_events, * num_samples"], float], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], ] = Field(..., description="""Spike waveforms.""") - timestamps: NDArray[Shape["* num_times"], np.float64] = Field( + timestamps: NDArray[Shape["* num_times"], float] = Field( ..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -182,7 +182,7 @@ class SpikeEventSeries(ElectricalSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -196,7 +196,7 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -232,7 +232,7 @@ class FeatureExtraction(NWBDataInterface): description="""Description of features (eg, ''PC1'') for each of the extracted features.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field( + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( ..., description="""Multi-dimensional array of features extracted from each event.""", json_schema_extra={ @@ -247,7 +247,7 @@ class FeatureExtraction(NWBDataInterface): } }, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of events that features correspond to (can be a link).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -277,12 +277,12 @@ class EventDetection(NWBDataInterface): ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", ) - source_idx: NDArray[Shape["* num_events"], np.int32] = Field( + source_idx: NDArray[Shape["* num_events"], int] = Field( ..., description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -367,9 +367,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel): "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"} }, ) - x: Optional[np.float32] = Field(None, description="""x coordinate""") - y: Optional[np.float32] = Field(None, description="""y coordinate""") - z: Optional[np.float32] = Field(None, description="""z coordinate""") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") class ClusterWaveforms(NWBDataInterface): @@ -388,7 +388,7 @@ class ClusterWaveforms(NWBDataInterface): waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) - waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). 
Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""", json_schema_extra={ @@ -397,7 +397,7 @@ class ClusterWaveforms(NWBDataInterface): } }, ) - waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""Stdev of waveforms for each cluster, using the same indices as in mean""", json_schema_extra={ @@ -424,17 +424,17 @@ class Clustering(NWBDataInterface): ..., description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""", ) - num: NDArray[Shape["* num_events"], np.int32] = Field( + num: NDArray[Shape["* num_events"], int] = Field( ..., description="""Cluster number of each event""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field( + peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field( ..., description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py index ec6ad30..31d8da0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py @@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) - start_time: NDArray[Any, np.float32] = Field( + start_time: NDArray[Any, float] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, np.float32] = Field( + stop_time: NDArray[Any, float] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -173,11 +173,11 @@ class TimeIntervalsTimeseries(VectorData): "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"} }, ) - idx_start: Optional[np.int32] = Field( + idx_start: Optional[int] = Field( None, description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", ) - count: Optional[np.int32] = Field( + count: Optional[int] = Field( None, description="""Number of data samples available in this time series, during this epoch.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py index 32fe49a..fb720cb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py @@ -102,7 +102,7 @@ class NWBFile(NWBContainer): None, description="""File version string. Use semantic versioning, e.g. 1.2.1. 
This will be the name of the format with trailing major, minor and patch numbers.""", ) - file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field( + file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""", json_schema_extra={ @@ -116,11 +116,11 @@ class NWBFile(NWBContainer): session_description: str = Field( ..., description="""A description of the experimental session and data in the file.""" ) - session_start_time: np.datetime64 = Field( + session_start_time: datetime = Field( ..., description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""", ) - timestamps_reference_time: np.datetime64 = Field( + timestamps_reference_time: datetime = Field( ..., description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""", ) @@ -335,7 +335,7 @@ class Subject(NWBContainer): age: Optional[str] = Field( None, description="""Age of subject. Can be supplied instead of 'date_of_birth'.""" ) - date_of_birth: Optional[np.datetime64] = Field( + date_of_birth: Optional[datetime] = Field( None, description="""Date of birth of subject. 
Can be supplied instead of 'age'.""" ) description: Optional[str] = Field( @@ -394,7 +394,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: NDArray[Any, np.float32] = Field( + x: NDArray[Any, float] = Field( ..., description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -403,7 +403,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - y: NDArray[Any, np.float32] = Field( + y: NDArray[Any, float] = Field( ..., description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -412,7 +412,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - z: NDArray[Any, np.float32] = Field( + z: NDArray[Any, float] = Field( ..., description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -421,7 +421,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: NDArray[Any, np.float32] = Field( + imp: NDArray[Any, float] = Field( ..., description="""Impedance of the channel.""", json_schema_extra={ @@ -439,7 +439,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: NDArray[Any, np.float32] = Field( + filtering: NDArray[Any, float] = Field( ..., description="""Description of hardware filtering.""", json_schema_extra={ @@ -460,7 +460,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, np.float32]] = Field( + rel_x: Optional[NDArray[Any, float]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -469,7 +469,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, np.float32]] = Field( + rel_y: Optional[NDArray[Any, float]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -478,7 +478,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, np.float32]] = Field( + rel_z: Optional[NDArray[Any, float]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py index a7fd4fd..c34073a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py @@ -109,11 +109,11 @@ class PatchClampSeries(TimeSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""") - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -126,12 +126,12 @@ class PatchClampSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -164,7 +164,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -180,18 +180,18 @@ class CurrentClampSeries(PatchClampSeries): name: str = Field(...) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""") - capacitance_compensation: Optional[np.float32] = Field( + bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") + bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -204,12 +204,12 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -255,21 +255,19 @@ class IZeroClampSeries(CurrentClampSeries): ) name: str = Field(...) - bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: np.float32 = Field( - ..., description="""Bridge balance, in ohms, fixed to 0.0.""" - ) - capacitance_compensation: np.float32 = Field( + bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") + bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -282,12 +280,12 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -319,10 +317,10 @@ class CurrentClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -335,12 +333,12 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -411,10 +409,10 @@ class VoltageClampSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -427,12 +425,12 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -488,7 +486,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): @@ -511,7 +509,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): @@ -534,7 +532,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): @@ -557,7 +555,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): @@ -580,7 +578,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): @@ -603,7 +601,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): @@ -626,7 +624,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampStimulusSeries(PatchClampSeries): @@ -643,10 +641,10 @@ class VoltageClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -659,12 +657,12 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -738,7 +736,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, np.uint32] = Field( + sweep_number: NDArray[Any, int] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py index 11c8e94..92928c5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py @@ -71,15 +71,15 @@ class GrayscaleImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -94,15 +94,15 @@ class RGBImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -117,15 +117,15 @@ class RGBAImage(Image): ) name: str = Field(...) 
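# --- Minimal sketch, not part of the patch: what the dtype swap above means ---
# The hunks in this file replace numpy scalar dtypes (np.float32, np.number)
# with Python builtins in the numpydantic NDArray annotations. Assuming
# numpydantic treats a builtin `float` dtype as precision-agnostic, arrays of
# any floating precision should validate, where np.float32 pinned exactly one.
# `ImageSketch` is a hypothetical stand-in mirroring the generated
# `Image.array` field pattern shown above.
from typing import Optional

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, Field


class ImageSketch(BaseModel):
    # Same shape/dtype annotation style as the generated models
    array: Optional[NDArray[Shape["* x, * y"], float]] = Field(None)


ImageSketch(array=np.zeros((4, 4), dtype=np.float32))  # accepted
ImageSketch(array=np.ones((4, 4), dtype=np.float64))   # also accepted
# --- end sketch ---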
- resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -142,11 +142,11 @@ class ImageSeries(TimeSeries): name: str = Field(...) data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -168,12 +168,12 @@ class ImageSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -204,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[np.int32] = Field( + starting_frame: Optional[int] = Field( None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. 
If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) @@ -225,11 +225,11 @@ class ImageMaskSeries(ImageSeries): name: str = Field(...) data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -251,12 +251,12 @@ class ImageMaskSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -284,24 +284,23 @@ class OpticalSeries(ImageSeries): ) name: str = Field(...) - distance: Optional[np.float32] = Field( + distance: Optional[float] = Field( None, description="""Distance from camera/monitor to target/eye.""" ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -323,12 +322,12 @@ class OpticalSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -356,7 +355,7 @@ class IndexSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int32] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the frame in the referenced ImageSeries.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -370,12 +369,12 @@ class IndexSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py index 7694e0c..5f1e06a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py @@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) @@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int8] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -296,12 +296,12 @@ class DecompositionSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -334,7 +334,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -370,7 +370,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field( + band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( ..., description="""Low and high limit of each band in Hz. 
If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -384,12 +384,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], np.float32] = Field( + band_mean: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field( + band_stdev: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -441,7 +441,7 @@ class Units(DynamicTable): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field( + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( None, description="""Observation intervals for each unit.""", json_schema_extra={ @@ -474,14 +474,14 @@ class Units(DynamicTable): ) waveform_mean: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") waveform_sd: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") colnames: Optional[str] = Field( @@ -517,7 +517,7 @@ class UnitsSpikeTimes(VectorData): "linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"} }, ) - resolution: Optional[np.float64] = Field( + resolution: Optional[float] = Field( None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py index 9be92dd..b34d130 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py @@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.number] = Field( + data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer): name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py index 4a1b85f..3fe9760 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py @@ -109,24 +109,21 @@ class TwoPhotonSeries(ImageSeries): ) name: str = Field(...) - pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""") - scan_line_rate: Optional[np.float32] = Field( + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field( None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", ) field_of_view: Optional[ - Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height"], np.float32], - ] + Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -148,12 +145,12 @@ class TwoPhotonSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -182,8 +179,7 @@ class RoiResponseSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_rois"], np.number], + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] ] = Field(..., description="""Signals from ROIs.""") rois: Named[DynamicTableRegion] = Field( ..., @@ -201,12 +197,12 @@ class RoiResponseSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py index b1c56d8..8ba1ae3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py @@ -127,17 +127,15 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -161,17 +159,15 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -195,17 +191,15 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -229,17 +223,15 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -263,24 +255,20 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. 
This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - focal_depth: Optional[np.float32] = Field( - None, description="""Focal depth offset, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -301,14 +289,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -332,21 +318,19 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py index 0cec2c7..8b60948 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py @@ -83,15 +83,15 @@ class Image(NWBData): ) name: str = Field(...) 
- resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -143,12 +143,12 @@ class TimeSeries(NWBDataInterface): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -177,11 +177,11 @@ class TimeSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - conversion: Optional[np.float32] = Field( + conversion: Optional[float] = Field( None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", ) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", ) @@ -212,11 +212,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""") + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") unit: Optional[str] = Field( None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" ) - value: np.float64 = Field(...) + value: float = Field(...) 
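
Throughout these hunks the generated models trade concrete numpy dtypes (`np.float32`, `np.float64`) for builtin `float`. A minimal sketch of why that widening is safe, assuming the models resolve `NDArray`/`Shape` through numpydantic (as the generator's `NumpydanticArray` import suggests) and that builtin Python types act as precision-generic dtypes there; the `Timestamps` model is a hypothetical stand-in for the generated fields:

```python
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class Timestamps(BaseModel):
    # hypothetical model mirroring the generated ``timestamps`` fields
    timestamps: NDArray[Shape["* num_times"], float]


# arrays previously pinned to np.float64 or np.float32 both validate
Timestamps(timestamps=np.arange(3, dtype=np.float32))
Timestamps(timestamps=np.arange(3, dtype=np.float64))
```

Either precision passes validation, so a model generated against one schema version no longer rejects arrays that happen to be loaded at a different float width.
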
class TimeSeriesSync(ConfiguredBaseModel): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py index a3f0972..6bd2d26 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py @@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -133,8 +133,8 @@ class SpatialSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py index b34ac7d..8e8528b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py @@ -108,9 +108,9 @@ class ElectricalSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_channels"], np.number], - NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], ] = Field(..., description="""Recorded voltage data.""") electrodes: Named[DynamicTableRegion] = Field( ..., @@ -119,7 +119,7 @@ class ElectricalSeries(TimeSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. 
This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -133,12 +133,12 @@ class ElectricalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -167,10 +167,10 @@ class SpikeEventSeries(ElectricalSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_events, * num_samples"], np.number], - NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_events, * num_samples"], float], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], ] = Field(..., description="""Spike waveforms.""") - timestamps: NDArray[Shape["* num_times"], np.float64] = Field( + timestamps: NDArray[Shape["* num_times"], float] = Field( ..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -182,7 +182,7 @@ class SpikeEventSeries(ElectricalSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -196,7 +196,7 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -232,7 +232,7 @@ class FeatureExtraction(NWBDataInterface): description="""Description of features (eg, ''PC1'') for each of the extracted features.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field( + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( ..., description="""Multi-dimensional array of features extracted from each event.""", json_schema_extra={ @@ -247,7 +247,7 @@ class FeatureExtraction(NWBDataInterface): } }, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of events that features correspond to (can be a link).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -277,12 +277,12 @@ class EventDetection(NWBDataInterface): ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", ) - source_idx: NDArray[Shape["* num_events"], np.int32] = Field( + source_idx: NDArray[Shape["* num_events"], int] = Field( ..., description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). 
The index points to each event from the raw data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -367,9 +367,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel): "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"} }, ) - x: Optional[np.float32] = Field(None, description="""x coordinate""") - y: Optional[np.float32] = Field(None, description="""y coordinate""") - z: Optional[np.float32] = Field(None, description="""z coordinate""") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") class ClusterWaveforms(NWBDataInterface): @@ -388,7 +388,7 @@ class ClusterWaveforms(NWBDataInterface): waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) - waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""", json_schema_extra={ @@ -397,7 +397,7 @@ class ClusterWaveforms(NWBDataInterface): } }, ) - waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""Stdev of waveforms for each cluster, using the same indices as in mean""", json_schema_extra={ @@ -424,17 +424,17 @@ class Clustering(NWBDataInterface): ..., description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""", ) - num: NDArray[Shape["* num_events"], np.int32] = Field( + num: NDArray[Shape["* num_events"], int] = Field( ..., description="""Cluster number of each event""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field( + peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field( ..., description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of clustered events, in seconds. 
This may be a link to times field in associated FeatureExtraction module.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py index 1cb1305..4f1a792 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py @@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) - start_time: NDArray[Any, np.float32] = Field( + start_time: NDArray[Any, float] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, np.float32] = Field( + stop_time: NDArray[Any, float] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -173,11 +173,11 @@ class TimeIntervalsTimeseries(VectorData): "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"} }, ) - idx_start: Optional[np.int32] = Field( + idx_start: Optional[int] = Field( None, description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", ) - count: Optional[np.int32] = Field( + count: Optional[int] = Field( None, description="""Number of data samples available in this time series, during this epoch.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py index ac4135f..6aba2d7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py @@ -118,7 +118,7 @@ class NWBFile(NWBContainer): None, description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""", ) - file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field( + file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""", json_schema_extra={ @@ -132,11 +132,11 @@ class NWBFile(NWBContainer): session_description: str = Field( ..., description="""A description of the experimental session and data in the file.""" ) - session_start_time: np.datetime64 = Field( + session_start_time: datetime = Field( ..., description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""", ) - timestamps_reference_time: np.datetime64 = Field( + timestamps_reference_time: datetime = Field( ..., description="""Date and time corresponding to time zero of all timestamps. 
The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""", ) @@ -372,7 +372,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: NDArray[Any, np.float32] = Field( + x: NDArray[Any, float] = Field( ..., description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -381,7 +381,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - y: NDArray[Any, np.float32] = Field( + y: NDArray[Any, float] = Field( ..., description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -390,7 +390,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - z: NDArray[Any, np.float32] = Field( + z: NDArray[Any, float] = Field( ..., description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -399,7 +399,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: NDArray[Any, np.float32] = Field( + imp: NDArray[Any, float] = Field( ..., description="""Impedance of the channel.""", json_schema_extra={ @@ -417,7 +417,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: NDArray[Any, np.float32] = Field( + filtering: NDArray[Any, float] = Field( ..., description="""Description of hardware filtering.""", json_schema_extra={ @@ -438,7 +438,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, np.float32]] = Field( + rel_x: Optional[NDArray[Any, float]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -447,7 +447,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, np.float32]] = Field( + rel_y: Optional[NDArray[Any, float]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -456,7 +456,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, np.float32]] = Field( + rel_z: Optional[NDArray[Any, float]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -547,7 +547,7 @@ class Subject(NWBContainer): age: Optional[str] = Field( None, description="""Age of subject. Can be supplied instead of 'date_of_birth'.""" ) - date_of_birth: Optional[np.datetime64] = Field( + date_of_birth: Optional[datetime] = Field( None, description="""Date of birth of subject. 
Can be supplied instead of 'age'.""" ) description: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py index d7e6f39..f0a8769 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py @@ -109,11 +109,11 @@ class PatchClampSeries(TimeSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""") - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -126,12 +126,12 @@ class PatchClampSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -164,7 +164,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -180,18 +180,18 @@ class CurrentClampSeries(PatchClampSeries): name: str = Field(...) 
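
The `core_nwb_file.py` hunks above make the same move for temporal fields, swapping `np.datetime64` for stdlib `datetime`. A hedged sketch of the effect, using a hypothetical `SessionInfo` model: pydantic parses the ISO 8601 strings that NWB specifies, including the timezone offset that `np.datetime64` cannot carry:

```python
from datetime import datetime

from pydantic import BaseModel


class SessionInfo(BaseModel):
    # hypothetical stand-in for the generated NWBFile datetime fields
    session_start_time: datetime


info = SessionInfo(session_start_time="2018-09-28T14:43:54.123+02:00")
print(info.session_start_time.isoformat())  # 2018-09-28T14:43:54.123000+02:00
```
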
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""") - capacitance_compensation: Optional[np.float32] = Field( + bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") + bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -204,12 +204,12 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -255,21 +255,19 @@ class IZeroClampSeries(CurrentClampSeries): ) name: str = Field(...) 
- bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: np.float32 = Field( - ..., description="""Bridge balance, in ohms, fixed to 0.0.""" - ) - capacitance_compensation: np.float32 = Field( + bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") + bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -282,12 +280,12 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -319,10 +317,10 @@ class CurrentClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -335,12 +333,12 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -411,10 +409,10 @@ class VoltageClampSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -427,12 +425,12 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -488,7 +486,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): @@ -511,7 +509,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) 
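
Unsigned integer fields (`sweep_number: np.uint32`, `control: np.uint8`) collapse to builtin `int` in the same way. A sketch under the assumption, implied by the patch, that numpydantic's builtin `int` dtype is generic over signed and unsigned numpy integer precisions; `Labels` is a hypothetical model:

```python
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class Labels(BaseModel):
    # hypothetical model mirroring the generated ``control`` fields
    control: NDArray[Shape["* num_times"], int]


Labels(control=np.zeros(4, dtype=np.uint8))   # formerly pinned to np.uint8
Labels(control=np.zeros(4, dtype=np.int32))   # any integer precision now passes
```
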
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): @@ -534,7 +532,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): @@ -557,7 +555,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): @@ -580,7 +578,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): @@ -603,7 +601,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): @@ -626,7 +624,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampStimulusSeries(PatchClampSeries): @@ -643,10 +641,10 @@ class VoltageClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -659,12 +657,12 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -738,7 +736,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) 
- sweep_number: NDArray[Any, np.uint32] = Field( + sweep_number: NDArray[Any, int] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py index 4bd8bd5..09f7cf1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py @@ -71,15 +71,15 @@ class GrayscaleImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -94,15 +94,15 @@ class RGBImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -117,15 +117,15 @@ class RGBAImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -142,11 +142,11 @@ class ImageSeries(TimeSeries): name: str = Field(...) data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -168,12 +168,12 @@ class ImageSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -204,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[np.int32] = Field( + starting_frame: Optional[int] = Field( None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) @@ -225,11 +225,11 @@ class ImageMaskSeries(ImageSeries): name: str = Field(...) data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -251,12 +251,12 @@ class ImageMaskSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -284,24 +284,23 @@ class OpticalSeries(ImageSeries): ) name: str = Field(...) - distance: Optional[np.float32] = Field( + distance: Optional[float] = Field( None, description="""Distance from camera/monitor to target/eye.""" ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -323,12 +322,12 @@ class OpticalSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -356,7 +355,7 @@ class IndexSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int32] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the frame in the referenced ImageSeries.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -370,12 +369,12 @@ class IndexSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py index 4b5b92a..9cd8b01 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py @@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) @@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int8] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -296,12 +296,12 @@ class DecompositionSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -334,7 +334,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -370,7 +370,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field( + band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( ..., description="""Low and high limit of each band in Hz. 
If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -384,12 +384,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], np.float32] = Field( + band_mean: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field( + band_stdev: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -441,7 +441,7 @@ class Units(DynamicTable): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field( + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( None, description="""Observation intervals for each unit.""", json_schema_extra={ @@ -474,14 +474,14 @@ class Units(DynamicTable): ) waveform_mean: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") waveform_sd: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") colnames: Optional[str] = Field( @@ -517,7 +517,7 @@ class UnitsSpikeTimes(VectorData): "linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"} }, ) - resolution: Optional[np.float64] = Field( + resolution: Optional[float] = Field( None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py index 06238c6..6f6995d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py @@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.number] = Field( + data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer): name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py index 76d1dae..8506e83 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py @@ -114,24 +114,21 @@ class TwoPhotonSeries(ImageSeries): ) name: str = Field(...) - pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""") - scan_line_rate: Optional[np.float32] = Field( + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field( None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", ) field_of_view: Optional[ - Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height"], np.float32], - ] + Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -153,12 +150,12 @@ class TwoPhotonSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -187,8 +184,7 @@ class RoiResponseSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_rois"], np.number], + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] ] = Field(..., description="""Signals from ROIs.""") rois: Named[DynamicTableRegion] = Field( ..., @@ -206,12 +202,12 @@ class RoiResponseSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -374,9 +370,9 @@ class PlaneSegmentationPixelMask(VectorData): "linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"} }, ) - x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""") - y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""") - weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""") + x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") + y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the pixel.""") description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -403,10 +399,10 @@ class PlaneSegmentationVoxelMask(VectorData): "linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"} }, ) - x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""") - y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""") - z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""") - weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""") + x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") + y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") + z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the voxel.""") description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -446,9 +442,7 @@ class OpticalChannel(NWBContainer): name: str = Field(...) description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: np.float32 = Field( - ..., description="""Emission wavelength for channel, in nm.""" - ) + emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") class MotionCorrection(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py index b204be8..e1dfcb7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py @@ -127,17 +127,15 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -161,17 +159,15 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -195,17 +191,15 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -229,17 +223,15 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -263,24 +255,20 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. 
This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - focal_depth: Optional[np.float32] = Field( - None, description="""Focal depth offset, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -301,14 +289,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -332,21 +318,19 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py index defb8e9..4e739b3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py @@ -83,15 +83,15 @@ class Image(NWBData): ) name: str = Field(...) 
- resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -143,12 +143,12 @@ class TimeSeries(NWBDataInterface): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -177,11 +177,11 @@ class TimeSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - conversion: Optional[np.float32] = Field( + conversion: Optional[float] = Field( None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", ) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", ) @@ -212,11 +212,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""") + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") unit: Optional[str] = Field( None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" ) - value: np.float64 = Field(...) + value: float = Field(...) 
class TimeSeriesSync(ConfiguredBaseModel): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py index 012e884..01ed338 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py @@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -133,8 +133,8 @@ class SpatialSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py index 5f4bd2c..ef56d6a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py @@ -108,9 +108,9 @@ class ElectricalSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_channels"], np.number], - NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], ] = Field(..., description="""Recorded voltage data.""") electrodes: Named[DynamicTableRegion] = Field( ..., @@ -119,7 +119,7 @@ class ElectricalSeries(TimeSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. 
This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -133,12 +133,12 @@ class ElectricalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -167,10 +167,10 @@ class SpikeEventSeries(ElectricalSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_events, * num_samples"], np.number], - NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_events, * num_samples"], float], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], ] = Field(..., description="""Spike waveforms.""") - timestamps: NDArray[Shape["* num_times"], np.float64] = Field( + timestamps: NDArray[Shape["* num_times"], float] = Field( ..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -182,7 +182,7 @@ class SpikeEventSeries(ElectricalSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -196,7 +196,7 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -232,7 +232,7 @@ class FeatureExtraction(NWBDataInterface): description="""Description of features (eg, ''PC1'') for each of the extracted features.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field( + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( ..., description="""Multi-dimensional array of features extracted from each event.""", json_schema_extra={ @@ -247,7 +247,7 @@ class FeatureExtraction(NWBDataInterface): } }, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of events that features correspond to (can be a link).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -277,12 +277,12 @@ class EventDetection(NWBDataInterface): ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", ) - source_idx: NDArray[Shape["* num_events"], np.int32] = Field( + source_idx: NDArray[Shape["* num_events"], int] = Field( ..., description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). 
The index points to each event from the raw data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -367,9 +367,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel): "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"} }, ) - x: Optional[np.float32] = Field(None, description="""x coordinate""") - y: Optional[np.float32] = Field(None, description="""y coordinate""") - z: Optional[np.float32] = Field(None, description="""z coordinate""") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") class ClusterWaveforms(NWBDataInterface): @@ -388,7 +388,7 @@ class ClusterWaveforms(NWBDataInterface): waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) - waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""", json_schema_extra={ @@ -397,7 +397,7 @@ class ClusterWaveforms(NWBDataInterface): } }, ) - waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""Stdev of waveforms for each cluster, using the same indices as in mean""", json_schema_extra={ @@ -424,17 +424,17 @@ class Clustering(NWBDataInterface): ..., description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""", ) - num: NDArray[Shape["* num_events"], np.int32] = Field( + num: NDArray[Shape["* num_events"], int] = Field( ..., description="""Cluster number of each event""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field( + peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field( ..., description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of clustered events, in seconds. 
This may be a link to times field in associated FeatureExtraction module.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py index e1b3da9..9ce71f4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py @@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) - start_time: NDArray[Any, np.float32] = Field( + start_time: NDArray[Any, float] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, np.float32] = Field( + stop_time: NDArray[Any, float] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -173,11 +173,11 @@ class TimeIntervalsTimeseries(VectorData): "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"} }, ) - idx_start: Optional[np.int32] = Field( + idx_start: Optional[int] = Field( None, description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", ) - count: Optional[np.int32] = Field( + count: Optional[int] = Field( None, description="""Number of data samples available in this time series, during this epoch.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py index d2a1e8f..22730c2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py @@ -118,7 +118,7 @@ class NWBFile(NWBContainer): None, description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""", ) - file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field( + file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""", json_schema_extra={ @@ -132,11 +132,11 @@ class NWBFile(NWBContainer): session_description: str = Field( ..., description="""A description of the experimental session and data in the file.""" ) - session_start_time: np.datetime64 = Field( + session_start_time: datetime = Field( ..., description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""", ) - timestamps_reference_time: np.datetime64 = Field( + timestamps_reference_time: datetime = Field( ..., description="""Date and time corresponding to time zero of all timestamps. 
The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""", ) @@ -372,7 +372,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: NDArray[Any, np.float32] = Field( + x: NDArray[Any, float] = Field( ..., description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -381,7 +381,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - y: NDArray[Any, np.float32] = Field( + y: NDArray[Any, float] = Field( ..., description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -390,7 +390,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - z: NDArray[Any, np.float32] = Field( + z: NDArray[Any, float] = Field( ..., description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -399,7 +399,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: NDArray[Any, np.float32] = Field( + imp: NDArray[Any, float] = Field( ..., description="""Impedance of the channel.""", json_schema_extra={ @@ -417,7 +417,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: NDArray[Any, np.float32] = Field( + filtering: NDArray[Any, float] = Field( ..., description="""Description of hardware filtering.""", json_schema_extra={ @@ -438,7 +438,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, np.float32]] = Field( + rel_x: Optional[NDArray[Any, float]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -447,7 +447,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, np.float32]] = Field( + rel_y: Optional[NDArray[Any, float]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -456,7 +456,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, np.float32]] = Field( + rel_z: Optional[NDArray[Any, float]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -547,7 +547,7 @@ class Subject(NWBContainer): age: Optional[str] = Field( None, description="""Age of subject. Can be supplied instead of 'date_of_birth'.""" ) - date_of_birth: Optional[np.datetime64] = Field( + date_of_birth: Optional[datetime] = Field( None, description="""Date of birth of subject. 
Can be supplied instead of 'age'.""" ) description: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py index 166ecb0..795f4b4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py @@ -109,11 +109,11 @@ class PatchClampSeries(TimeSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""") - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -126,12 +126,12 @@ class PatchClampSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -164,7 +164,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -180,18 +180,18 @@ class CurrentClampSeries(PatchClampSeries): name: str = Field(...) 
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""") - capacitance_compensation: Optional[np.float32] = Field( + bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") + bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -204,12 +204,12 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -255,21 +255,19 @@ class IZeroClampSeries(CurrentClampSeries): ) name: str = Field(...) 
- bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: np.float32 = Field( - ..., description="""Bridge balance, in ohms, fixed to 0.0.""" - ) - capacitance_compensation: np.float32 = Field( + bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") + bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -282,12 +280,12 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -319,10 +317,10 @@ class CurrentClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -335,12 +333,12 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -411,10 +409,10 @@ class VoltageClampSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -427,12 +425,12 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -488,7 +486,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): @@ -511,7 +509,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) 
class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): @@ -534,7 +532,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): @@ -557,7 +555,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): @@ -580,7 +578,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): @@ -603,7 +601,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): @@ -626,7 +624,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampStimulusSeries(PatchClampSeries): @@ -643,10 +641,10 @@ class VoltageClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -659,12 +657,12 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -738,7 +736,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) 
- sweep_number: NDArray[Any, np.uint32] = Field( + sweep_number: NDArray[Any, int] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py index b74228e..7b0a102 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py @@ -71,15 +71,15 @@ class GrayscaleImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -94,15 +94,15 @@ class RGBImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -117,15 +117,15 @@ class RGBAImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -142,11 +142,11 @@ class ImageSeries(TimeSeries): name: str = Field(...) data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -168,12 +168,12 @@ class ImageSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -204,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[np.int32] = Field( + starting_frame: Optional[int] = Field( None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) @@ -225,11 +225,11 @@ class ImageMaskSeries(ImageSeries): name: str = Field(...) data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -251,12 +251,12 @@ class ImageMaskSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -284,24 +284,23 @@ class OpticalSeries(ImageSeries): ) name: str = Field(...) - distance: Optional[np.float32] = Field( + distance: Optional[float] = Field( None, description="""Distance from camera/monitor to target/eye.""" ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -323,12 +322,12 @@ class OpticalSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -356,7 +355,7 @@ class IndexSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int32] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the frame in the referenced ImageSeries.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -370,12 +369,12 @@ class IndexSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py index 959d2df..6fff7db 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py @@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) @@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int8] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -296,12 +296,12 @@ class DecompositionSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -334,7 +334,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -370,7 +370,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field( + band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( ..., description="""Low and high limit of each band in Hz. 
If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -384,12 +384,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], np.float32] = Field( + band_mean: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field( + band_stdev: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -441,7 +441,7 @@ class Units(DynamicTable): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field( + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( None, description="""Observation intervals for each unit.""", json_schema_extra={ @@ -474,14 +474,14 @@ class Units(DynamicTable): ) waveform_mean: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") waveform_sd: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") colnames: Optional[str] = Field( @@ -517,7 +517,7 @@ class UnitsSpikeTimes(VectorData): "linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"} }, ) - resolution: Optional[np.float64] = Field( + resolution: Optional[float] = Field( None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py index 5b95cba..d6c46a3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py @@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.number] = Field( + data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer): name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py index 4a695c9..32129b6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py @@ -114,24 +114,23 @@ class TwoPhotonSeries(ImageSeries): ) name: str = Field(...) - pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""") - scan_line_rate: Optional[np.float32] = Field( + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field( None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -153,12 +152,12 @@ class TwoPhotonSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -187,8 +186,7 @@ class RoiResponseSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_rois"], np.number], + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] ] = Field(..., description="""Signals from ROIs.""") rois: Named[DynamicTableRegion] = Field( ..., @@ -206,12 +204,12 @@ class RoiResponseSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -374,9 +372,9 @@ class PlaneSegmentationPixelMask(VectorData): "linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"} }, ) - x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""") - y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""") - weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""") + x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") + y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the pixel.""") description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -403,10 +401,10 @@ class PlaneSegmentationVoxelMask(VectorData): "linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"} }, ) - x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""") - y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""") - z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""") - weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""") + x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") + y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") + z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the voxel.""") description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -446,9 +444,7 @@ class OpticalChannel(NWBContainer): name: str = Field(...) description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: np.float32 = Field( - ..., description="""Emission wavelength for channel, in nm.""" - ) + emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") class MotionCorrection(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py index 4c3f758..fe8df70 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py @@ -127,17 +127,15 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -161,17 +159,15 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -195,17 +191,15 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -229,17 +223,15 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -263,24 +255,20 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. 
This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - focal_depth: Optional[np.float32] = Field( - None, description="""Focal depth offset, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -301,14 +289,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -332,21 +318,19 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py index d6fc9fa..5d6e07d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py @@ -88,15 +88,15 @@ class Image(NWBData): ) name: str = Field(...) 
- resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -148,12 +148,12 @@ class TimeSeries(NWBDataInterface): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -182,11 +182,11 @@ class TimeSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - conversion: Optional[np.float32] = Field( + conversion: Optional[float] = Field( None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", ) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", ) @@ -221,11 +221,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""") + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") unit: Optional[str] = Field( None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" ) - value: np.float64 = Field(...) + value: float = Field(...) 
class TimeSeriesSync(ConfiguredBaseModel): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py index b764c15..94f5843 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py @@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -133,8 +133,8 @@ class SpatialSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py index 9cbedd2..c0b9368 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py @@ -112,9 +112,9 @@ class ElectricalSeries(TimeSeries): description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". 
If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_channels"], np.number], - NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], ] = Field(..., description="""Recorded voltage data.""") electrodes: Named[DynamicTableRegion] = Field( ..., @@ -123,7 +123,7 @@ class ElectricalSeries(TimeSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -137,12 +137,12 @@ class ElectricalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -171,10 +171,10 @@ class SpikeEventSeries(ElectricalSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_events, * num_samples"], np.number], - NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_events, * num_samples"], float], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], ] = Field(..., description="""Spike waveforms.""") - timestamps: NDArray[Shape["* num_times"], np.float64] = Field( + timestamps: NDArray[Shape["* num_times"], float] = Field( ..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. 
Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -190,7 +190,7 @@ class SpikeEventSeries(ElectricalSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -204,7 +204,7 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -240,7 +240,7 @@ class FeatureExtraction(NWBDataInterface): description="""Description of features (eg, ''PC1'') for each of the extracted features.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field( + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( ..., description="""Multi-dimensional array of features extracted from each event.""", json_schema_extra={ @@ -255,7 +255,7 @@ class FeatureExtraction(NWBDataInterface): } }, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of events that features correspond to (can be a link).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -285,12 +285,12 @@ class EventDetection(NWBDataInterface): ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", ) - source_idx: NDArray[Shape["* num_events"], np.int32] = Field( + source_idx: NDArray[Shape["* num_events"], int] = Field( ..., description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). 
The index points to each event from the raw data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -375,9 +375,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel): "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"} }, ) - x: Optional[np.float32] = Field(None, description="""x coordinate""") - y: Optional[np.float32] = Field(None, description="""y coordinate""") - z: Optional[np.float32] = Field(None, description="""z coordinate""") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") class ClusterWaveforms(NWBDataInterface): @@ -396,7 +396,7 @@ class ClusterWaveforms(NWBDataInterface): waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) - waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""", json_schema_extra={ @@ -405,7 +405,7 @@ class ClusterWaveforms(NWBDataInterface): } }, ) - waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""Stdev of waveforms for each cluster, using the same indices as in mean""", json_schema_extra={ @@ -432,17 +432,17 @@ class Clustering(NWBDataInterface): ..., description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""", ) - num: NDArray[Shape["* num_events"], np.int32] = Field( + num: NDArray[Shape["* num_events"], int] = Field( ..., description="""Cluster number of each event""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field( + peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field( ..., description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of clustered events, in seconds. 
This may be a link to times field in associated FeatureExtraction module.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py index 6457839..1466993 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py @@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) - start_time: NDArray[Any, np.float32] = Field( + start_time: NDArray[Any, float] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, np.float32] = Field( + stop_time: NDArray[Any, float] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -170,11 +170,11 @@ class TimeIntervalsTimeseries(VectorData): "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"} }, ) - idx_start: Optional[np.int32] = Field( + idx_start: Optional[int] = Field( None, description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", ) - count: Optional[np.int32] = Field( + count: Optional[int] = Field( None, description="""Number of data samples available in this time series, during this epoch.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py index e4d19ca..cea7fa7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py @@ -118,7 +118,7 @@ class NWBFile(NWBContainer): None, description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""", ) - file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field( + file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""", json_schema_extra={ @@ -132,11 +132,11 @@ class NWBFile(NWBContainer): session_description: str = Field( ..., description="""A description of the experimental session and data in the file.""" ) - session_start_time: np.datetime64 = Field( + session_start_time: datetime = Field( ..., description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""", ) - timestamps_reference_time: np.datetime64 = Field( + timestamps_reference_time: datetime = Field( ..., description="""Date and time corresponding to time zero of all timestamps. 
The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""", ) @@ -372,7 +372,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: NDArray[Any, np.float32] = Field( + x: NDArray[Any, float] = Field( ..., description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -381,7 +381,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - y: NDArray[Any, np.float32] = Field( + y: NDArray[Any, float] = Field( ..., description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -390,7 +390,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - z: NDArray[Any, np.float32] = Field( + z: NDArray[Any, float] = Field( ..., description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -399,7 +399,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: NDArray[Any, np.float32] = Field( + imp: NDArray[Any, float] = Field( ..., description="""Impedance of the channel, in ohms.""", json_schema_extra={ @@ -417,7 +417,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: NDArray[Any, np.float32] = Field( + filtering: NDArray[Any, float] = Field( ..., description="""Description of hardware filtering, including the filter name and frequency cutoffs.""", json_schema_extra={ @@ -438,7 +438,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, np.float32]] = Field( + rel_x: Optional[NDArray[Any, float]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -447,7 +447,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, np.float32]] = Field( + rel_y: Optional[NDArray[Any, float]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -456,7 +456,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, np.float32]] = Field( + rel_z: Optional[NDArray[Any, float]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -544,7 +544,7 @@ class Subject(NWBContainer): age: Optional[str] = Field( None, description="""Age of subject. Can be supplied instead of 'date_of_birth'.""" ) - date_of_birth: Optional[np.datetime64] = Field( + date_of_birth: Optional[datetime] = Field( None, description="""Date of birth of subject. 
Can be supplied instead of 'age'.""" ) description: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py index 48af82c..db95e72 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py @@ -109,11 +109,11 @@ class PatchClampSeries(TimeSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""") - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -126,12 +126,12 @@ class PatchClampSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -164,7 +164,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -180,18 +180,18 @@ class CurrentClampSeries(PatchClampSeries): name: str = Field(...) 
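An aside on the `np.datetime64` → `datetime` swaps in `core_nwb_file.py` above: a stdlib `datetime` annotation lets pydantic parse and validate the ISO 8601 extended strings that the NWB docstrings describe, which a `np.datetime64` annotation could not do as a pydantic field. A minimal sketch with a hypothetical stand-in model (not the generated `NWBFile`/`Subject` classes):

```python
from datetime import datetime
from typing import Optional

from pydantic import BaseModel


class Session(BaseModel):
    # stand-ins for fields like session_start_time / date_of_birth
    session_start_time: datetime
    date_of_birth: Optional[datetime] = None


# pydantic parses ISO 8601 extended strings with a timezone offset,
# matching the format the field docstrings above describe
s = Session(session_start_time="2018-09-28T14:43:54.123+02:00")
```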
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""") - capacitance_compensation: Optional[np.float32] = Field( + bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") + bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -204,12 +204,12 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -259,18 +259,16 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""", ) - bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: np.float32 = Field( - ..., description="""Bridge balance, in ohms, fixed to 0.0.""" - ) - capacitance_compensation: np.float32 = Field( + bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") + bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -283,12 +281,12 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -320,10 +318,10 @@ class CurrentClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -336,12 +334,12 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -412,10 +410,10 @@ class VoltageClampSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -428,12 +426,12 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -489,7 +487,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): @@ -512,7 +510,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): @@ -535,7 +533,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): @@ -558,7 +556,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): @@ -581,7 +579,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): @@ -604,7 +602,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): @@ -627,7 +625,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) 
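The `value: np.float32` → `value: float` changes above all follow the same scalar-with-fixed-unit pattern shared by the `VoltageClampSeries*` subclasses. A rough sketch of that shape, simplified from the generated `ConfiguredBaseModel` subclasses (illustrative only):

```python
from typing import Optional

from pydantic import BaseModel


class CapacitanceFast(BaseModel):
    # simplified stand-in for VoltageClampSeriesCapacitanceFast:
    # a single float value alongside a unit fixed by the schema
    name: str = "capacitance_fast"
    unit: Optional[str] = "farads"
    value: float


CapacitanceFast(value=1.2e-12)
```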
class VoltageClampStimulusSeries(PatchClampSeries): @@ -644,10 +642,10 @@ class VoltageClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -660,12 +658,12 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -739,7 +737,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, np.uint32] = Field( + sweep_number: NDArray[Any, int] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py index d54abe3..356fc99 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py @@ -71,15 +71,15 @@ class GrayscaleImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -94,15 +94,15 @@ class RGBImage(Image): ) name: str = Field(...) 
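Stepping back from the `core_nwb_image.py` hunks for a moment: the `Image` subclasses keep a single `array` field whose `Union` of shapes admits grayscale, RGB, or RGBA data. A minimal sketch of how that validates, assuming numpydantic's `NDArray`/`Shape` (the array representation the generator targets):

```python
from typing import Optional, Union

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class ImageSketch(BaseModel):
    # simplified stand-in for the generated Image classes; the builtin
    # `float` dtype accepts any float precision, which is the point of
    # the np.number/np.float32 -> float changes in this patch
    array: Optional[
        Union[
            NDArray[Shape["* x, * y"], float],            # grayscale
            NDArray[Shape["* x, * y, 3 r_g_b"], float],   # RGB
            NDArray[Shape["* x, * y, 4 r_g_b_a"], float], # RGBA
        ]
    ] = None


ImageSketch(array=np.zeros((8, 8)))     # grayscale passes
ImageSketch(array=np.zeros((8, 8, 3)))  # RGB passes
```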
- resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -117,15 +117,15 @@ class RGBAImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -142,11 +142,11 @@ class ImageSeries(TimeSeries): name: str = Field(...) data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -168,12 +168,12 @@ class ImageSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -204,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[np.int32] = Field( + starting_frame: Optional[int] = Field( None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. 
The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) @@ -225,11 +225,11 @@ class ImageMaskSeries(ImageSeries): name: str = Field(...) data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -251,12 +251,12 @@ class ImageMaskSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -284,24 +284,23 @@ class OpticalSeries(ImageSeries): ) name: str = Field(...) - distance: Optional[np.float32] = Field( + distance: Optional[float] = Field( None, description="""Distance from camera/monitor to target/eye.""" ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). 
Must also specify frame of reference.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -323,12 +322,12 @@ class OpticalSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -356,7 +355,7 @@ class IndexSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int32] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the frame in the referenced ImageSeries.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -370,12 +369,12 @@ class IndexSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py index 45a4d8b..01fcbf8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py @@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) @@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int8] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -303,12 +303,12 @@ class DecompositionSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -341,7 +341,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -377,7 +377,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field( + band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( ..., description="""Low and high limit of each band in Hz. 
If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -391,12 +391,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], np.float32] = Field( + band_mean: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field( + band_stdev: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -445,7 +445,7 @@ class Units(DynamicTable): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field( + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( None, description="""Observation intervals for each unit.""", json_schema_extra={ @@ -478,17 +478,17 @@ class Units(DynamicTable): ) waveform_mean: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") waveform_sd: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], np.number]] = Field( + waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field( None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. 
The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", json_schema_extra={ @@ -541,7 +541,7 @@ class UnitsSpikeTimes(VectorData): "linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"} }, ) - resolution: Optional[np.float64] = Field( + resolution: Optional[float] = Field( None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py index 71d202b..aa52534 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py @@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.number] = Field( + data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer): name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py index a08cfb2..7de8e48 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py @@ -114,24 +114,23 @@ class TwoPhotonSeries(ImageSeries): ) name: str = Field(...) - pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""") - scan_line_rate: Optional[np.float32] = Field( + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field( None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Optional[ Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], ] ] = Field(None, description="""Binary data representing images across frames.""") - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -153,12 +152,12 @@ class TwoPhotonSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -187,8 +186,7 @@ class RoiResponseSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_rois"], np.number], + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] ] = Field(..., description="""Signals from ROIs.""") rois: Named[DynamicTableRegion] = Field( ..., @@ -206,12 +204,12 @@ class RoiResponseSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -371,9 +369,9 @@ class PlaneSegmentationPixelMask(VectorData): "linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"} }, ) - x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""") - y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""") - weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""") + x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") + y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the pixel.""") description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -400,10 +398,10 @@ class PlaneSegmentationVoxelMask(VectorData): "linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"} }, ) - x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""") - y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""") - z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""") - weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""") + x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") + y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") + z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the voxel.""") description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -443,9 +441,7 @@ class OpticalChannel(NWBContainer): name: str = Field(...) 
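A note on the `PlaneSegmentationPixelMask` / `PlaneSegmentationVoxelMask` columns above: each row pairs integer coordinates with a float weight, so the builtin-typed fields line up one-to-one with a mask entry. A hypothetical, heavily simplified sketch (the real classes subclass `VectorData`):

```python
from typing import Optional

from pydantic import BaseModel


class PixelMaskEntry(BaseModel):
    # simplified stand-in for one PlaneSegmentationPixelMask row
    x: Optional[int] = None         # pixel x-coordinate
    y: Optional[int] = None         # pixel y-coordinate
    weight: Optional[float] = None  # weight of the pixel


PixelMaskEntry(x=10, y=12, weight=0.75)
```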
description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: np.float32 = Field( - ..., description="""Emission wavelength for channel, in nm.""" - ) + emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") class MotionCorrection(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py index 6af3e96..2f9da18 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py @@ -127,17 +127,15 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -161,17 +159,15 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -195,17 +191,15 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -229,17 +223,15 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -263,24 +255,20 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - focal_depth: Optional[np.float32] = Field( - None, description="""Focal depth offset, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -301,14 +289,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -332,21 +318,19 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py index fe433cd..1dac0d5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py @@ -90,11 +90,11 @@ class TimeSeriesReferenceVectorData(VectorData): name: str = Field( "timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}} ) - idx_start: np.int32 = Field( + idx_start: int = Field( ..., description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", ) - count: np.int32 = Field( + count: int = Field( ..., description="""Number of data samples available in this time series, during this epoch""", ) @@ -122,15 +122,15 @@ class Image(NWBData): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -182,12 +182,12 @@ class TimeSeries(NWBDataInterface): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -216,11 +216,11 @@ class TimeSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - conversion: Optional[np.float32] = Field( + conversion: Optional[float] = Field( None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", ) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", ) @@ -255,11 +255,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""") + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") unit: Optional[str] = Field( None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" ) - value: np.float64 = Field(...) + value: float = Field(...) class TimeSeriesSync(ConfiguredBaseModel): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py index 8e859b8..6077059 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py @@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -133,8 +133,8 @@ class SpatialSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py index 1a25b07..978abba 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py @@ -112,9 +112,9 @@ class ElectricalSeries(TimeSeries): description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_channels"], np.number], - NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], ] = Field(..., description="""Recorded voltage data.""") electrodes: Named[DynamicTableRegion] = Field( ..., @@ -123,7 +123,7 @@ class ElectricalSeries(TimeSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. 
If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -137,12 +137,12 @@ class ElectricalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -171,10 +171,10 @@ class SpikeEventSeries(ElectricalSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_events, * num_samples"], np.number], - NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_events, * num_samples"], float], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], ] = Field(..., description="""Spike waveforms.""") - timestamps: NDArray[Shape["* num_times"], np.float64] = Field( + timestamps: NDArray[Shape["* num_times"], float] = Field( ..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -190,7 +190,7 @@ class SpikeEventSeries(ElectricalSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -204,7 +204,7 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -240,7 +240,7 @@ class FeatureExtraction(NWBDataInterface): description="""Description of features (eg, ''PC1'') for each of the extracted features.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field( + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( ..., description="""Multi-dimensional array of features extracted from each event.""", json_schema_extra={ @@ -255,7 +255,7 @@ class FeatureExtraction(NWBDataInterface): } }, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of events that features correspond to (can be a link).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -285,12 +285,12 @@ class EventDetection(NWBDataInterface): ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", ) - source_idx: NDArray[Shape["* num_events"], np.int32] = Field( + source_idx: NDArray[Shape["* num_events"], int] = Field( ..., description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). 
The index points to each event from the raw data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -375,9 +375,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel): "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"} }, ) - x: Optional[np.float32] = Field(None, description="""x coordinate""") - y: Optional[np.float32] = Field(None, description="""y coordinate""") - z: Optional[np.float32] = Field(None, description="""z coordinate""") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") class ClusterWaveforms(NWBDataInterface): @@ -396,7 +396,7 @@ class ClusterWaveforms(NWBDataInterface): waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) - waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""", json_schema_extra={ @@ -405,7 +405,7 @@ class ClusterWaveforms(NWBDataInterface): } }, ) - waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""Stdev of waveforms for each cluster, using the same indices as in mean""", json_schema_extra={ @@ -432,17 +432,17 @@ class Clustering(NWBDataInterface): ..., description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""", ) - num: NDArray[Shape["* num_events"], np.int32] = Field( + num: NDArray[Shape["* num_events"], int] = Field( ..., description="""Cluster number of each event""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field( + peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field( ..., description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of clustered events, in seconds. 
This may be a link to times field in associated FeatureExtraction module.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py index 99ba895..0cd6107 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py @@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) - start_time: NDArray[Any, np.float32] = Field( + start_time: NDArray[Any, float] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, np.float32] = Field( + stop_time: NDArray[Any, float] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -170,11 +170,11 @@ class TimeIntervalsTimeseries(VectorData): "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"} }, ) - idx_start: Optional[np.int32] = Field( + idx_start: Optional[int] = Field( None, description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", ) - count: Optional[np.int32] = Field( + count: Optional[int] = Field( None, description="""Number of data samples available in this time series, during this epoch.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py index c2c5d26..211707f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py @@ -126,7 +126,7 @@ class NWBFile(NWBContainer): None, description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""", ) - file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field( + file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""", json_schema_extra={ @@ -140,11 +140,11 @@ class NWBFile(NWBContainer): session_description: str = Field( ..., description="""A description of the experimental session and data in the file.""" ) - session_start_time: np.datetime64 = Field( + session_start_time: datetime = Field( ..., description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""", ) - timestamps_reference_time: np.datetime64 = Field( + timestamps_reference_time: datetime = Field( ..., description="""Date and time corresponding to time zero of all timestamps. 
The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""", ) @@ -380,7 +380,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: NDArray[Any, np.float32] = Field( + x: NDArray[Any, float] = Field( ..., description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -389,7 +389,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - y: NDArray[Any, np.float32] = Field( + y: NDArray[Any, float] = Field( ..., description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -398,7 +398,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - z: NDArray[Any, np.float32] = Field( + z: NDArray[Any, float] = Field( ..., description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -407,7 +407,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: NDArray[Any, np.float32] = Field( + imp: NDArray[Any, float] = Field( ..., description="""Impedance of the channel, in ohms.""", json_schema_extra={ @@ -446,7 +446,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, np.float32]] = Field( + rel_x: Optional[NDArray[Any, float]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -455,7 +455,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, np.float32]] = Field( + rel_y: Optional[NDArray[Any, float]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -464,7 +464,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, np.float32]] = Field( + rel_z: Optional[NDArray[Any, float]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -573,7 +573,7 @@ class Subject(NWBContainer): age: Optional[str] = Field( None, description="""Age of subject. Can be supplied instead of 'date_of_birth'.""" ) - date_of_birth: Optional[np.datetime64] = Field( + date_of_birth: Optional[datetime] = Field( None, description="""Date of birth of subject. 
Can be supplied instead of 'age'.""" ) description: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py index b84fea1..760097b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py @@ -116,11 +116,11 @@ class PatchClampSeries(TimeSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""") - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -133,12 +133,12 @@ class PatchClampSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -171,7 +171,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -187,18 +187,18 @@ class CurrentClampSeries(PatchClampSeries): name: str = Field(...) 
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""") - capacitance_compensation: Optional[np.float32] = Field( + bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") + bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -211,12 +211,12 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -266,18 +266,16 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""", ) - bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: np.float32 = Field( - ..., description="""Bridge balance, in ohms, fixed to 0.0.""" - ) - capacitance_compensation: np.float32 = Field( + bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") + bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -290,12 +288,12 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -327,10 +325,10 @@ class CurrentClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -343,12 +341,12 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -419,10 +417,10 @@ class VoltageClampSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -435,12 +433,12 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -496,7 +494,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): @@ -519,7 +517,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): @@ -542,7 +540,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): @@ -565,7 +563,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): @@ -588,7 +586,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): @@ -611,7 +609,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): @@ -634,7 +632,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) 
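The hunks above swap `np.float32` for the builtin `float` in the scalar `value` fields of the voltage-clamp compensation classes. A minimal sketch of the practical effect, assuming the import path follows this repo's generated-module layout and that `name` keeps its `ifabsent` default (both assumptions, not verified against the regenerated file):

```python
# Hypothetical usage sketch: after this change, the scalar `value` fields
# validate plain Python floats; no numpy scalar is required. The import
# path below is assumed from the repo layout shown in this diff.
from nwb_linkml.models.pydantic.core.v2_4_0.core_nwb_icephys import (
    VoltageClampSeriesCapacitanceFast,
)

cap = VoltageClampSeriesCapacitanceFast(value=1.2e-12)  # builtin float accepted
assert isinstance(cap.value, float)
```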
class VoltageClampStimulusSeries(PatchClampSeries): @@ -651,10 +649,10 @@ class VoltageClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -667,12 +665,12 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -746,7 +744,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, np.uint32] = Field( + sweep_number: NDArray[Any, int] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py index 1209210..e940464 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py @@ -71,15 +71,15 @@ class GrayscaleImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -94,15 +94,15 @@ class RGBImage(Image): ) name: str = Field(...) 
- resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -117,15 +117,15 @@ class RGBAImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -141,13 +141,12 @@ class ImageSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -169,12 +168,12 @@ class ImageSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -205,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[np.int32] = Field( + starting_frame: Optional[int] = Field( None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. 
The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) @@ -225,13 +224,12 @@ class ImageMaskSeries(ImageSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -253,12 +251,12 @@ class ImageMaskSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -286,24 +284,23 @@ class OpticalSeries(ImageSeries): ) name: str = Field(...) - distance: Optional[np.float32] = Field( + distance: Optional[float] = Field( None, description="""Distance from camera/monitor to target/eye.""" ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). 
Must also specify frame of reference.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -325,12 +322,12 @@ class OpticalSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -358,7 +355,7 @@ class IndexSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int32] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the frame in the referenced ImageSeries.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -372,12 +369,12 @@ class IndexSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py index 941eed4..540d30f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py @@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) @@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int8] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -303,12 +303,12 @@ class DecompositionSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -341,7 +341,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -377,7 +377,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field( + band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( ..., description="""Low and high limit of each band in Hz. 
If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -391,12 +391,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], np.float32] = Field( + band_mean: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field( + band_stdev: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -445,7 +445,7 @@ class Units(DynamicTable): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field( + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( None, description="""Observation intervals for each unit.""", json_schema_extra={ @@ -478,17 +478,17 @@ class Units(DynamicTable): ) waveform_mean: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") waveform_sd: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], np.number]] = Field( + waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field( None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. 
The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", json_schema_extra={ @@ -541,7 +541,7 @@ class UnitsSpikeTimes(VectorData): "linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"} }, ) - resolution: Optional[np.float64] = Field( + resolution: Optional[float] = Field( None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py index 2b145ce..8c72013 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py @@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.number] = Field( + data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer): name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py index 62d96b8..24dd9a9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py @@ -114,25 +114,23 @@ class TwoPhotonSeries(ImageSeries): ) name: str = Field(...) - pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""") - scan_line_rate: Optional[np.float32] = Field( + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field( None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -154,12 +152,12 @@ class TwoPhotonSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -188,8 +186,7 @@ class RoiResponseSeries(TimeSeries): name: str = Field(...) 
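The `field_of_view` hunk above also shows the union members now distinguished purely by shape rather than by numpy dtype, so the one-line reflow is cosmetic. A minimal sketch of how such a shape union validates, assuming numpydantic's `NDArray`/`Shape` semantics; `FieldOfView` is a made-up stand-in, not one of the generated classes:

```python
from typing import Union

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class FieldOfView(BaseModel):
    # 2-vector (width, height) or 3-vector (width, height, depth), any float width
    value: Union[
        NDArray[Shape["2 width_height"], float],
        NDArray[Shape["3 width_height_depth"], float],
    ]


FieldOfView(value=np.array([1.0, 2.0]))       # matches the 2-element arm
FieldOfView(value=np.array([1.0, 2.0, 3.0]))  # matches the 3-element arm
```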
data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_rois"], np.number], + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] ] = Field(..., description="""Signals from ROIs.""") rois: Named[DynamicTableRegion] = Field( ..., @@ -207,12 +204,12 @@ class RoiResponseSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -372,9 +369,9 @@ class PlaneSegmentationPixelMask(VectorData): "linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"} }, ) - x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""") - y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""") - weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""") + x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") + y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the pixel.""") description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -401,10 +398,10 @@ class PlaneSegmentationVoxelMask(VectorData): "linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"} }, ) - x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""") - y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""") - z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""") - weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""") + x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") + y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") + z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the voxel.""") description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -444,9 +441,7 @@ class OpticalChannel(NWBContainer): name: str = Field(...) 
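The pixel/voxel mask hunks above make the same move for scalar columns: builtin `int`/`float` annotations accept numpy scalars by coercion and keep `model_dump_json` free of custom encoders for numpy types. A rough sketch under that assumption; `PixelMaskRow` is hypothetical, not a generated class:

```python
import numpy as np
from pydantic import BaseModel


class PixelMaskRow(BaseModel):  # hypothetical stand-in
    x: int
    y: int
    weight: float


# np.float64 subclasses float, so numpy scalars validate as-is,
# and the dump is plain JSON numbers
row = PixelMaskRow(x=3, y=7, weight=np.float64(0.5))
print(row.model_dump_json())  # {"x":3,"y":7,"weight":0.5}
```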
description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: np.float32 = Field( - ..., description="""Emission wavelength for channel, in nm.""" - ) + emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") class MotionCorrection(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py index aaad019..501665e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py @@ -127,17 +127,15 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -161,17 +159,15 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -195,17 +191,15 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -229,17 +223,15 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -263,24 +255,20 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - focal_depth: Optional[np.float32] = Field( - None, description="""Focal depth offset, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -301,14 +289,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -332,21 +318,19 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py index a686488..96da748 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py @@ -114,11 +114,11 @@ class TimeSeriesReferenceVectorData(VectorData): name: str = Field( "timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}} ) - idx_start: np.int32 = Field( + idx_start: int = Field( ..., description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", ) - count: np.int32 = Field( + count: int = Field( ..., description="""Number of data samples available in this time series, during this epoch""", ) @@ -146,15 +146,15 @@ class Image(NWBData): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -221,12 +221,12 @@ class TimeSeries(NWBDataInterface): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -255,15 +255,15 @@ class TimeSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - conversion: Optional[np.float32] = Field( + conversion: Optional[float] = Field( None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", ) - offset: Optional[np.float32] = Field( + offset: Optional[float] = Field( None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", ) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", ) @@ -298,11 +298,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""") + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") unit: Optional[str] = Field( None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" ) - value: np.float64 = Field(...) + value: float = Field(...) 
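The `conversion` description in the `TimeSeriesData` hunk above carries a worked example; as a quick sanity check of that arithmetic (variable names here are illustrative only):

```python
# int16 spans -32768..32767; a +/-2.5 V range recorded at 8000x gain gives
# the volts-per-count multiplier quoted in the description
volts_half_range = 2.5
int16_half_range = 32768
gain = 8000
conversion = volts_half_range / int16_half_range / gain
assert abs(conversion - 9.5367e-9) < 1e-13

volts = 12345 * conversion  # raw counts -> volts, ~1.18e-4 V
```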
class TimeSeriesSync(ConfiguredBaseModel): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py index 4c8757c..246b481 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py @@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -133,10 +133,10 @@ class SpatialSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, 1 x"], np.number], - NDArray[Shape["* num_times, 2 x_y"], np.number], - NDArray[Shape["* num_times, 3 x_y_z"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, 1 x"], float], + NDArray[Shape["* num_times, 2 x_y"], float], + NDArray[Shape["* num_times, 3 x_y_z"], float], ] ] = Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py index f5f24cd..671682b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py @@ -112,9 +112,9 @@ class ElectricalSeries(TimeSeries): description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". 
If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_channels"], np.number], - NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], ] = Field(..., description="""Recorded voltage data.""") electrodes: Named[DynamicTableRegion] = Field( ..., @@ -123,7 +123,7 @@ class ElectricalSeries(TimeSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -137,12 +137,12 @@ class ElectricalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -171,10 +171,10 @@ class SpikeEventSeries(ElectricalSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_events, * num_samples"], np.number], - NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_events, * num_samples"], float], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], ] = Field(..., description="""Spike waveforms.""") - timestamps: NDArray[Shape["* num_times"], np.float64] = Field( + timestamps: NDArray[Shape["* num_times"], float] = Field( ..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. 
Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -190,7 +190,7 @@ class SpikeEventSeries(ElectricalSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -204,7 +204,7 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -240,7 +240,7 @@ class FeatureExtraction(NWBDataInterface): description="""Description of features (eg, ''PC1'') for each of the extracted features.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field( + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( ..., description="""Multi-dimensional array of features extracted from each event.""", json_schema_extra={ @@ -255,7 +255,7 @@ class FeatureExtraction(NWBDataInterface): } }, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of events that features correspond to (can be a link).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -285,12 +285,12 @@ class EventDetection(NWBDataInterface): ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", ) - source_idx: NDArray[Shape["* num_events"], np.int32] = Field( + source_idx: NDArray[Shape["* num_events"], int] = Field( ..., description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). 
The index points to each event from the raw data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -375,9 +375,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel): "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"} }, ) - x: Optional[np.float32] = Field(None, description="""x coordinate""") - y: Optional[np.float32] = Field(None, description="""y coordinate""") - z: Optional[np.float32] = Field(None, description="""z coordinate""") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") class ClusterWaveforms(NWBDataInterface): @@ -396,7 +396,7 @@ class ClusterWaveforms(NWBDataInterface): waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) - waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""", json_schema_extra={ @@ -405,7 +405,7 @@ class ClusterWaveforms(NWBDataInterface): } }, ) - waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""Stdev of waveforms for each cluster, using the same indices as in mean""", json_schema_extra={ @@ -432,17 +432,17 @@ class Clustering(NWBDataInterface): ..., description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""", ) - num: NDArray[Shape["* num_events"], np.int32] = Field( + num: NDArray[Shape["* num_events"], int] = Field( ..., description="""Cluster number of each event""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field( + peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field( ..., description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of clustered events, in seconds. 
This may be a link to times field in associated FeatureExtraction module.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py index e55c2ea..e728288 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py @@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) - start_time: NDArray[Any, np.float32] = Field( + start_time: NDArray[Any, float] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, np.float32] = Field( + stop_time: NDArray[Any, float] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py index fb4442a..f71c621 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py @@ -127,7 +127,7 @@ class NWBFile(NWBContainer): None, description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""", ) - file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field( + file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""", json_schema_extra={ @@ -141,11 +141,11 @@ class NWBFile(NWBContainer): session_description: str = Field( ..., description="""A description of the experimental session and data in the file.""" ) - session_start_time: np.datetime64 = Field( + session_start_time: datetime = Field( ..., description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""", ) - timestamps_reference_time: np.datetime64 = Field( + timestamps_reference_time: datetime = Field( ..., description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. 
All times stored in the file use this time as reference (i.e., time zero).""", ) @@ -383,7 +383,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: Optional[NDArray[Any, np.float32]] = Field( + x: Optional[NDArray[Any, float]] = Field( None, description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -392,7 +392,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - y: Optional[NDArray[Any, np.float32]] = Field( + y: Optional[NDArray[Any, float]] = Field( None, description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -401,7 +401,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - z: Optional[NDArray[Any, np.float32]] = Field( + z: Optional[NDArray[Any, float]] = Field( None, description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -410,7 +410,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: Optional[NDArray[Any, np.float32]] = Field( + imp: Optional[NDArray[Any, float]] = Field( None, description="""Impedance of the channel, in ohms.""", json_schema_extra={ @@ -449,7 +449,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, np.float32]] = Field( + rel_x: Optional[NDArray[Any, float]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -458,7 +458,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, np.float32]] = Field( + rel_y: Optional[NDArray[Any, float]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -467,7 +467,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, np.float32]] = Field( + rel_z: Optional[NDArray[Any, float]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -576,7 +576,7 @@ class Subject(NWBContainer): age: Optional[str] = Field( None, description="""Age of subject. Can be supplied instead of 'date_of_birth'.""" ) - date_of_birth: Optional[np.datetime64] = Field( + date_of_birth: Optional[datetime] = Field( None, description="""Date of birth of subject. 
Can be supplied instead of 'age'.""" ) description: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py index 6dfee22..1b741f1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py @@ -116,11 +116,11 @@ class PatchClampSeries(TimeSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""") - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -133,12 +133,12 @@ class PatchClampSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -171,7 +171,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) - array: Optional[NDArray[Shape["* num_times"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -187,18 +187,18 @@ class CurrentClampSeries(PatchClampSeries): name: str = Field(...) 
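A few hunks back, `session_start_time`, `timestamps_reference_time`, and `date_of_birth` moved from `np.datetime64` to `datetime`; unlike `np.datetime64`, `datetime` carries timezone information, which matches the ISO 8601 offsets the descriptions call for. A small sketch, assuming pydantic v2's datetime parsing; `SessionInfo` is invented for illustration:

```python
from datetime import datetime

from pydantic import BaseModel


class SessionInfo(BaseModel):  # illustrative only
    session_start_time: datetime


info = SessionInfo(session_start_time="2018-09-28T14:43:54.123+02:00")
print(info.session_start_time.utcoffset())  # 2:00:00 -- the offset survives parsing
```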
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""") - capacitance_compensation: Optional[np.float32] = Field( + bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") + bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -211,12 +211,12 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -266,18 +266,16 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""", ) - bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: np.float32 = Field( - ..., description="""Bridge balance, in ohms, fixed to 0.0.""" - ) - capacitance_compensation: np.float32 = Field( + bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") + bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -290,12 +288,12 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -327,10 +325,10 @@ class CurrentClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -343,12 +341,12 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -419,10 +417,10 @@ class VoltageClampSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -435,12 +433,12 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -496,7 +494,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): @@ -519,7 +517,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): @@ -542,7 +540,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): @@ -565,7 +563,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): @@ -588,7 +586,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): @@ -611,7 +609,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): @@ -634,7 +632,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) 
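The `timestamps` hunks repeated through these classes all make the same `np.float64` to `float` substitution inside `NDArray`. If numpydantic treats the builtin `float` as a generic dtype (the assumption behind this sketch), arrays of any float width validate, where the old annotation pinned exactly one:

```python
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class Stamped(BaseModel):  # illustrative, not a generated model
    timestamps: NDArray[Shape["* num_times"], float]


Stamped(timestamps=np.linspace(0.0, 1.0, 11, dtype=np.float32))  # float32 ok
Stamped(timestamps=np.linspace(0.0, 1.0, 11))                    # float64 ok
```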
class VoltageClampStimulusSeries(PatchClampSeries): @@ -651,10 +649,10 @@ class VoltageClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -667,12 +665,12 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -747,7 +745,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, np.uint32] = Field( + sweep_number: NDArray[Any, int] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py index 52ffddb..209487e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py @@ -71,15 +71,15 @@ class GrayscaleImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -94,15 +94,15 @@ class RGBImage(Image): ) name: str = Field(...) 
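The `SweepTable.sweep_number` hunk above pairs the generic integer with an unconstrained shape (`NDArray[Any, int]`). Under the same numpydantic assumption, an unsigned acquisition dtype should still validate; `SweepColumn` is a hypothetical stand-in:

```python
from typing import Any

import numpy as np
from numpydantic import NDArray
from pydantic import BaseModel


class SweepColumn(BaseModel):  # hypothetical stand-in
    sweep_number: NDArray[Any, int]


# ``Any`` leaves the shape open; builtin ``int`` is assumed to cover uint32
SweepColumn(sweep_number=np.array([1, 2, 3], dtype=np.uint32))
```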
- resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -117,15 +117,15 @@ class RGBAImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -141,13 +141,12 @@ class ImageSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -169,12 +168,12 @@ class ImageSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -205,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[np.int32] = Field( + starting_frame: Optional[int] = Field( None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. 
The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) @@ -225,13 +224,12 @@ class ImageMaskSeries(ImageSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -253,12 +251,12 @@ class ImageMaskSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -286,24 +284,23 @@ class OpticalSeries(ImageSeries): ) name: str = Field(...) - distance: Optional[np.float32] = Field( + distance: Optional[float] = Field( None, description="""Distance from camera/monitor to target/eye.""" ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). 
Must also specify frame of reference.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -325,12 +322,12 @@ class OpticalSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -358,7 +355,7 @@ class IndexSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.uint32] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the image (using zero-indexing) in the linked Images object.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -372,12 +369,12 @@ class IndexSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py index 3d0cd3b..11d9e44 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py @@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) @@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int8] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -303,12 +303,12 @@ class DecompositionSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -341,7 +341,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -377,7 +377,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field( + band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( ..., description="""Low and high limit of each band in Hz. 
If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -391,12 +391,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], np.float32] = Field( + band_mean: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field( + band_stdev: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -445,7 +445,7 @@ class Units(DynamicTable): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field( + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( None, description="""Observation intervals for each unit.""", json_schema_extra={ @@ -478,17 +478,17 @@ class Units(DynamicTable): ) waveform_mean: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") waveform_sd: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], np.number]] = Field( + waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field( None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. 
The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", json_schema_extra={ @@ -541,7 +541,7 @@ class UnitsSpikeTimes(VectorData): "linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"} }, ) - resolution: Optional[np.float64] = Field( + resolution: Optional[float] = Field( None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py index 618462b..88958c0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py @@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.number] = Field( + data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer): name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py index 518c841..41b48f1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py @@ -114,25 +114,23 @@ class TwoPhotonSeries(ImageSeries): ) name: str = Field(...) - pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""") - scan_line_rate: Optional[np.float32] = Field( + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field( None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -154,12 +152,12 @@ class TwoPhotonSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -188,8 +186,7 @@ class RoiResponseSeries(TimeSeries): name: str = Field(...) 
data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_rois"], np.number], + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] ] = Field(..., description="""Signals from ROIs.""") rois: Named[DynamicTableRegion] = Field( ..., @@ -207,12 +204,12 @@ class RoiResponseSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -372,9 +369,9 @@ class PlaneSegmentationPixelMask(VectorData): "linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"} }, ) - x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""") - y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""") - weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""") + x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") + y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the pixel.""") description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -401,10 +398,10 @@ class PlaneSegmentationVoxelMask(VectorData): "linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"} }, ) - x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""") - y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""") - z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""") - weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""") + x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") + y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") + z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the voxel.""") description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -444,9 +441,7 @@ class OpticalChannel(NWBContainer): name: str = Field(...) 
description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: np.float32 = Field( - ..., description="""Emission wavelength for channel, in nm.""" - ) + emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") class MotionCorrection(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py index c8b182e..167c50f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py @@ -127,17 +127,15 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -161,17 +159,15 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -195,17 +191,15 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -229,17 +223,15 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -263,24 +255,20 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - focal_depth: Optional[np.float32] = Field( - None, description="""Focal depth offset, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -301,14 +289,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -332,21 +318,19 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py index d890d13..13a1dcf 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py @@ -114,11 +114,11 @@ class TimeSeriesReferenceVectorData(VectorData): name: str = Field( "timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}} ) - idx_start: np.int32 = Field( + idx_start: int = Field( ..., description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", ) - count: np.int32 = Field( + count: int = Field( ..., description="""Number of data samples available in this time series, during this epoch""", ) @@ -146,15 +146,15 @@ class Image(NWBData): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -221,12 +221,12 @@ class TimeSeries(NWBDataInterface): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -255,15 +255,15 @@ class TimeSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - conversion: Optional[np.float32] = Field( + conversion: Optional[float] = Field( None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", ) - offset: Optional[np.float32] = Field( + offset: Optional[float] = Field( None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", ) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", ) @@ -298,11 +298,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""") + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") unit: Optional[str] = Field( None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" ) - value: np.float64 = Field(...) + value: float = Field(...) 
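# A minimal sketch of what the scalar-type relaxation above means for users of
# the generated models (hedged: the import path is assumed from this file's
# location under nwb_linkml/src, and the example values are hypothetical).
# With `value: float` instead of `value: np.float64`, plain Python numbers
# validate directly, and numpy float64 inputs still pass because np.float64
# subclasses Python's built-in float.
import numpy as np

from nwb_linkml.models.pydantic.core.v2_6_0_alpha.core_nwb_base import (
    TimeSeriesStartingTime,
)

# `value` is the only required field here; `rate` and `unit` are Optional.
starting_time = TimeSeriesStartingTime(
    rate=30000.0, unit="seconds", value=np.float64(0.0)
)
assert isinstance(starting_time.value, float)  # np.float64 is a float subclass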
class TimeSeriesSync(ConfiguredBaseModel): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py index bc29452..d2321dd 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py @@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -133,10 +133,10 @@ class SpatialSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, 1 x"], np.number], - NDArray[Shape["* num_times, 2 x_y"], np.number], - NDArray[Shape["* num_times, 3 x_y_z"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, 1 x"], float], + NDArray[Shape["* num_times, 2 x_y"], float], + NDArray[Shape["* num_times, 3 x_y_z"], float], ] ] = Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py index 6dd5f69..694a64f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py @@ -112,9 +112,9 @@ class ElectricalSeries(TimeSeries): description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". 
If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_channels"], np.number], - NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], ] = Field(..., description="""Recorded voltage data.""") electrodes: Named[DynamicTableRegion] = Field( ..., @@ -123,7 +123,7 @@ class ElectricalSeries(TimeSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -137,12 +137,12 @@ class ElectricalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -171,10 +171,10 @@ class SpikeEventSeries(ElectricalSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_events, * num_samples"], np.number], - NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_events, * num_samples"], float], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], ] = Field(..., description="""Spike waveforms.""") - timestamps: NDArray[Shape["* num_times"], np.float64] = Field( + timestamps: NDArray[Shape["* num_times"], float] = Field( ..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. 
Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -190,7 +190,7 @@ class SpikeEventSeries(ElectricalSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -204,7 +204,7 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -240,7 +240,7 @@ class FeatureExtraction(NWBDataInterface): description="""Description of features (eg, ''PC1'') for each of the extracted features.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field( + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( ..., description="""Multi-dimensional array of features extracted from each event.""", json_schema_extra={ @@ -255,7 +255,7 @@ class FeatureExtraction(NWBDataInterface): } }, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of events that features correspond to (can be a link).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -285,12 +285,12 @@ class EventDetection(NWBDataInterface): ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", ) - source_idx: NDArray[Shape["* num_events"], np.int32] = Field( + source_idx: NDArray[Shape["* num_events"], int] = Field( ..., description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). 
The index points to each event from the raw data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -375,9 +375,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel): "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"} }, ) - x: Optional[np.float32] = Field(None, description="""x coordinate""") - y: Optional[np.float32] = Field(None, description="""y coordinate""") - z: Optional[np.float32] = Field(None, description="""z coordinate""") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") class ClusterWaveforms(NWBDataInterface): @@ -396,7 +396,7 @@ class ClusterWaveforms(NWBDataInterface): waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) - waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""", json_schema_extra={ @@ -405,7 +405,7 @@ class ClusterWaveforms(NWBDataInterface): } }, ) - waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""Stdev of waveforms for each cluster, using the same indices as in mean""", json_schema_extra={ @@ -432,17 +432,17 @@ class Clustering(NWBDataInterface): ..., description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""", ) - num: NDArray[Shape["* num_events"], np.int32] = Field( + num: NDArray[Shape["* num_events"], int] = Field( ..., description="""Cluster number of each event""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field( + peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field( ..., description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of clustered events, in seconds. 
This may be a link to times field in associated FeatureExtraction module.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py index 635e417..0073a74 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py @@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) - start_time: NDArray[Any, np.float32] = Field( + start_time: NDArray[Any, float] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, np.float32] = Field( + stop_time: NDArray[Any, float] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py index f1e2354..453e761 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py @@ -127,7 +127,7 @@ class NWBFile(NWBContainer): None, description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""", ) - file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field( + file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""", json_schema_extra={ @@ -141,11 +141,11 @@ class NWBFile(NWBContainer): session_description: str = Field( ..., description="""A description of the experimental session and data in the file.""" ) - session_start_time: np.datetime64 = Field( + session_start_time: datetime = Field( ..., description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""", ) - timestamps_reference_time: np.datetime64 = Field( + timestamps_reference_time: datetime = Field( ..., description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. 
All times stored in the file use this time as reference (i.e., time zero).""", ) @@ -383,7 +383,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: Optional[NDArray[Any, np.float32]] = Field( + x: Optional[NDArray[Any, float]] = Field( None, description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -392,7 +392,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - y: Optional[NDArray[Any, np.float32]] = Field( + y: Optional[NDArray[Any, float]] = Field( None, description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -401,7 +401,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - z: Optional[NDArray[Any, np.float32]] = Field( + z: Optional[NDArray[Any, float]] = Field( None, description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -410,7 +410,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: Optional[NDArray[Any, np.float32]] = Field( + imp: Optional[NDArray[Any, float]] = Field( None, description="""Impedance of the channel, in ohms.""", json_schema_extra={ @@ -449,7 +449,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, np.float32]] = Field( + rel_x: Optional[NDArray[Any, float]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -458,7 +458,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, np.float32]] = Field( + rel_y: Optional[NDArray[Any, float]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -467,7 +467,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, np.float32]] = Field( + rel_z: Optional[NDArray[Any, float]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -576,7 +576,7 @@ class Subject(NWBContainer): age: Optional[SubjectAge] = Field( None, description="""Age of subject. Can be supplied instead of 'date_of_birth'.""" ) - date_of_birth: Optional[np.datetime64] = Field( + date_of_birth: Optional[datetime] = Field( None, description="""Date of birth of subject. 
Can be supplied instead of 'age'.""" ) description: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py index c1feffe..c903549 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py @@ -116,11 +116,11 @@ class PatchClampSeries(TimeSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""") - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -133,12 +133,12 @@ class PatchClampSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -171,7 +171,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) - array: Optional[NDArray[Shape["* num_times"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -187,18 +187,18 @@ class CurrentClampSeries(PatchClampSeries): name: str = Field(...) 
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""") - capacitance_compensation: Optional[np.float32] = Field( + bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") + bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -211,12 +211,12 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -266,18 +266,16 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""", ) - bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: np.float32 = Field( - ..., description="""Bridge balance, in ohms, fixed to 0.0.""" - ) - capacitance_compensation: np.float32 = Field( + bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") + bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -290,12 +288,12 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -327,10 +325,10 @@ class CurrentClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -343,12 +341,12 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -419,10 +417,10 @@ class VoltageClampSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -435,12 +433,12 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -496,7 +494,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): @@ -519,7 +517,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): @@ -542,7 +540,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): @@ -565,7 +563,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): @@ -588,7 +586,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): @@ -611,7 +609,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): @@ -634,7 +632,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) 
class VoltageClampStimulusSeries(PatchClampSeries): @@ -651,10 +649,10 @@ class VoltageClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -667,12 +665,12 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -747,7 +745,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, np.uint32] = Field( + sweep_number: NDArray[Any, int] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py index e1ee620..e69ff14 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py @@ -76,15 +76,15 @@ class GrayscaleImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -99,15 +99,15 @@ class RGBImage(Image): ) name: str = Field(...) 
- resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -122,15 +122,15 @@ class RGBAImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -146,13 +146,12 @@ class ImageSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -174,12 +173,12 @@ class ImageSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -210,7 +209,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[np.int32] = Field( + starting_frame: Optional[int] = Field( None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access.
The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) @@ -230,13 +229,12 @@ class ImageMaskSeries(ImageSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -258,12 +256,12 @@ class ImageMaskSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -291,24 +289,23 @@ class OpticalSeries(ImageSeries): ) name: str = Field(...) - distance: Optional[np.float32] = Field( + distance: Optional[float] = Field( None, description="""Distance from camera/monitor to target/eye.""" ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). 
Must also specify frame of reference.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -330,12 +327,12 @@ class OpticalSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -363,7 +360,7 @@ class IndexSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.uint32] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the image (using zero-indexing) in the linked Images object.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -377,12 +374,12 @@ class IndexSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py index a3861c2..428c0b1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py @@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) @@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int8] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -303,12 +303,12 @@ class DecompositionSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -341,7 +341,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -377,7 +377,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field( + band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( ..., description="""Low and high limit of each band in Hz. 
If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -391,12 +391,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], np.float32] = Field( + band_mean: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field( + band_stdev: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -445,7 +445,7 @@ class Units(DynamicTable): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field( + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( None, description="""Observation intervals for each unit.""", json_schema_extra={ @@ -478,17 +478,17 @@ class Units(DynamicTable): ) waveform_mean: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") waveform_sd: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], np.number]] = Field( + waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field( None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. 
The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", json_schema_extra={ @@ -541,7 +541,7 @@ class UnitsSpikeTimes(VectorData): "linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"} }, ) - resolution: Optional[np.float64] = Field( + resolution: Optional[float] = Field( None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py index 8b0a950..419b1d0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py @@ -76,7 +76,7 @@ class OptogeneticSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.number] = Field( + data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -90,12 +90,12 @@ class OptogeneticSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -124,7 +124,7 @@ class OptogeneticStimulusSite(NWBContainer): name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py index de696cd..7693cc6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py @@ -114,31 +114,28 @@ class OnePhotonSeries(ImageSeries): ) name: str = Field(...) - pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""") - scan_line_rate: Optional[np.float32] = Field( + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field( None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", ) - exposure_time: Optional[np.float32] = Field( + exposure_time: Optional[float] = Field( None, description="""Exposure time of the sample; often the inverse of the frequency.""" ) - binning: Optional[np.uint8] = Field( + binning: Optional[int] = Field( None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.""" ) - power: Optional[np.float32] = Field( - None, description="""Power of the excitation in mW, if known.""" - ) - intensity: Optional[np.float32] = Field( + power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""") + intensity: Optional[float] = Field( None, description="""Intensity of the excitation in mW/mm^2, if known.""" ) data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -160,12 +157,12 @@ class OnePhotonSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -193,25 +190,23 @@ class TwoPhotonSeries(ImageSeries): ) name: str = Field(...) - pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""") - scan_line_rate: Optional[np.float32] = Field( + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field( None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -233,12 +228,12 @@ class TwoPhotonSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -267,8 +262,7 @@ class RoiResponseSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_rois"], np.number], + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] ] = Field(..., description="""Signals from ROIs.""") rois: Named[DynamicTableRegion] = Field( ..., @@ -286,12 +280,12 @@ class RoiResponseSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -451,9 +445,9 @@ class PlaneSegmentationPixelMask(VectorData): "linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"} }, ) - x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""") - y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""") - weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""") + x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") + y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the pixel.""") description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -480,10 +474,10 @@ class PlaneSegmentationVoxelMask(VectorData): "linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"} }, ) - x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""") - y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""") - z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""") - weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""") + x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") + y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") + z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the voxel.""") description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -523,9 +517,7 @@ class OpticalChannel(NWBContainer): name: str = Field(...) 
description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: np.float32 = Field( - ..., description="""Emission wavelength for channel, in nm.""" - ) + emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") class MotionCorrection(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py index fd2509b..ce3ae04 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py @@ -127,17 +127,15 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -161,17 +159,15 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -195,17 +191,15 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -229,17 +223,15 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -263,24 +255,20 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - focal_depth: Optional[np.float32] = Field( - None, description="""Focal depth offset, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -301,14 +289,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -332,21 +318,19 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py index b76ba30..745ac40 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py @@ -114,11 +114,11 @@ class TimeSeriesReferenceVectorData(VectorData): name: str = Field( "timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}} ) - idx_start: np.int32 = Field( + idx_start: int = Field( ..., description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", ) - count: np.int32 = Field( + count: int = Field( ..., description="""Number of data samples available in this time series, during this epoch""", ) @@ -146,15 +146,15 @@ class Image(NWBData): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -221,12 +221,12 @@ class TimeSeries(NWBDataInterface): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -255,15 +255,15 @@ class TimeSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - conversion: Optional[np.float32] = Field( + conversion: Optional[float] = Field( None, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", ) - offset: Optional[np.float32] = Field( + offset: Optional[float] = Field( None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", ) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", ) @@ -298,11 +298,11 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[np.float32] = Field(None, description="""Sampling rate, in Hz.""") + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") unit: Optional[str] = Field( None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" ) - value: np.float64 = Field(...) + value: float = Field(...) 
class TimeSeriesSync(ConfiguredBaseModel): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py index 991d017..304d675 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py @@ -93,12 +93,12 @@ class SpatialSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -133,10 +133,10 @@ class SpatialSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, 1 x"], np.number], - NDArray[Shape["* num_times, 2 x_y"], np.number], - NDArray[Shape["* num_times, 3 x_y_z"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, 1 x"], float], + NDArray[Shape["* num_times, 2 x_y"], float], + NDArray[Shape["* num_times, 3 x_y_z"], float], ] ] = Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py index e11eaad..4b32f60 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py @@ -112,9 +112,9 @@ class ElectricalSeries(TimeSeries): description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". 
If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", ) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_channels"], np.number], - NDArray[Shape["* num_times, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], ] = Field(..., description="""Recorded voltage data.""") electrodes: Named[DynamicTableRegion] = Field( ..., @@ -123,7 +123,7 @@ class ElectricalSeries(TimeSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -137,12 +137,12 @@ class ElectricalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -171,10 +171,10 @@ class SpikeEventSeries(ElectricalSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_events, * num_samples"], np.number], - NDArray[Shape["* num_events, * num_channels, * num_samples"], np.number], + NDArray[Shape["* num_events, * num_samples"], float], + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], ] = Field(..., description="""Spike waveforms.""") - timestamps: NDArray[Shape["* num_times"], np.float64] = Field( + timestamps: NDArray[Shape["* num_times"], float] = Field( ..., description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. 
Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -190,7 +190,7 @@ class SpikeEventSeries(ElectricalSeries): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - channel_conversion: Optional[NDArray[Shape["* num_channels"], np.float32]] = Field( + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, @@ -204,7 +204,7 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -240,7 +240,7 @@ class FeatureExtraction(NWBDataInterface): description="""Description of features (eg, ''PC1'') for each of the extracted features.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], np.float32] = Field( + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( ..., description="""Multi-dimensional array of features extracted from each event.""", json_schema_extra={ @@ -255,7 +255,7 @@ class FeatureExtraction(NWBDataInterface): } }, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of events that features correspond to (can be a link).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -285,12 +285,12 @@ class EventDetection(NWBDataInterface): ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", ) - source_idx: NDArray[Shape["* num_events"], np.int32] = Field( + source_idx: NDArray[Shape["* num_events"], int] = Field( ..., description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). 
The index points to each event from the raw data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, @@ -375,9 +375,9 @@ class ElectrodeGroupPosition(ConfiguredBaseModel): "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"} }, ) - x: Optional[np.float32] = Field(None, description="""x coordinate""") - y: Optional[np.float32] = Field(None, description="""y coordinate""") - z: Optional[np.float32] = Field(None, description="""z coordinate""") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") class ClusterWaveforms(NWBDataInterface): @@ -396,7 +396,7 @@ class ClusterWaveforms(NWBDataInterface): waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) - waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""", json_schema_extra={ @@ -405,7 +405,7 @@ class ClusterWaveforms(NWBDataInterface): } }, ) - waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], np.float32] = Field( + waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( ..., description="""Stdev of waveforms for each cluster, using the same indices as in mean""", json_schema_extra={ @@ -432,17 +432,17 @@ class Clustering(NWBDataInterface): ..., description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""", ) - num: NDArray[Shape["* num_events"], np.int32] = Field( + num: NDArray[Shape["* num_events"], int] = Field( ..., description="""Cluster number of each event""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) - peak_over_rms: NDArray[Shape["* num_clusters"], np.float32] = Field( + peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field( ..., description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_clusters"}]}}}, ) - times: NDArray[Shape["* num_events"], np.float64] = Field( + times: NDArray[Shape["* num_events"], float] = Field( ..., description="""Times of clustered events, in seconds. 
This may be a link to times field in associated FeatureExtraction module.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py index 2d844c9..a715324 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py @@ -96,7 +96,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) - start_time: NDArray[Any, np.float32] = Field( + start_time: NDArray[Any, float] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, np.float32] = Field( + stop_time: NDArray[Any, float] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py index 47cb4ad..3ccc777 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py @@ -127,7 +127,7 @@ class NWBFile(NWBContainer): None, description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""", ) - file_create_date: NDArray[Shape["* num_modifications"], np.datetime64] = Field( + file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""", json_schema_extra={ @@ -141,11 +141,11 @@ class NWBFile(NWBContainer): session_description: str = Field( ..., description="""A description of the experimental session and data in the file.""" ) - session_start_time: np.datetime64 = Field( + session_start_time: datetime = Field( ..., description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""", ) - timestamps_reference_time: np.datetime64 = Field( + timestamps_reference_time: datetime = Field( ..., description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. 
All times stored in the file use this time as reference (i.e., time zero).""", ) @@ -391,7 +391,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: Optional[NDArray[Any, np.float32]] = Field( + x: Optional[NDArray[Any, float]] = Field( None, description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -400,7 +400,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - y: Optional[NDArray[Any, np.float32]] = Field( + y: Optional[NDArray[Any, float]] = Field( None, description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -409,7 +409,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - z: Optional[NDArray[Any, np.float32]] = Field( + z: Optional[NDArray[Any, float]] = Field( None, description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -418,7 +418,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: Optional[NDArray[Any, np.float32]] = Field( + imp: Optional[NDArray[Any, float]] = Field( None, description="""Impedance of the channel, in ohms.""", json_schema_extra={ @@ -457,7 +457,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, np.float32]] = Field( + rel_x: Optional[NDArray[Any, float]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -466,7 +466,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, np.float32]] = Field( + rel_y: Optional[NDArray[Any, float]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -475,7 +475,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, np.float32]] = Field( + rel_z: Optional[NDArray[Any, float]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -584,7 +584,7 @@ class Subject(NWBContainer): age: Optional[SubjectAge] = Field( None, description="""Age of subject. Can be supplied instead of 'date_of_birth'.""" ) - date_of_birth: Optional[np.datetime64] = Field( + date_of_birth: Optional[datetime] = Field( None, description="""Date of birth of subject. 
Can be supplied instead of 'age'.""" ) description: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py index b651db2..b84a7f0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py @@ -116,11 +116,11 @@ class PatchClampSeries(TimeSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) data: PatchClampSeriesData = Field(..., description="""Recorded voltage or current.""") - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -133,12 +133,12 @@ class PatchClampSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -171,7 +171,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) - array: Optional[NDArray[Shape["* num_times"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -187,18 +187,18 @@ class CurrentClampSeries(PatchClampSeries): name: str = Field(...) 
data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - bias_current: Optional[np.float32] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[np.float32] = Field(None, description="""Bridge balance, in ohms.""") - capacitance_compensation: Optional[np.float32] = Field( + bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") + bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -211,12 +211,12 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -266,18 +266,16 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""", ) - bias_current: np.float32 = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: np.float32 = Field( - ..., description="""Bridge balance, in ohms, fixed to 0.0.""" - ) - capacitance_compensation: np.float32 = Field( + bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") + bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -290,12 +288,12 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -327,10 +325,10 @@ class CurrentClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -343,12 +341,12 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -419,10 +417,10 @@ class VoltageClampSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -435,12 +433,12 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -496,7 +494,7 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): @@ -519,7 +517,7 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): None, description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): @@ -542,7 +540,7 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): @@ -565,7 +563,7 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): @@ -588,7 +586,7 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): None, description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): @@ -611,7 +609,7 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): @@ -634,7 +632,7 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", ) - value: np.float32 = Field(...) + value: float = Field(...) 
class VoltageClampStimulusSeries(PatchClampSeries): @@ -651,10 +649,10 @@ class VoltageClampStimulusSeries(PatchClampSeries): stimulus_description: Optional[str] = Field( None, description="""Protocol/stimulus name for this patch-clamp dataset.""" ) - sweep_number: Optional[np.uint32] = Field( + sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) - gain: Optional[np.float32] = Field( + gain: Optional[float] = Field( None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) @@ -667,12 +665,12 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -747,7 +745,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, np.uint32] = Field( + sweep_number: NDArray[Any, int] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py index 7377214..9fff36e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py @@ -71,15 +71,15 @@ class GrayscaleImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -94,15 +94,15 @@ class RGBImage(Image): ) name: str = Field(...) 
- resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -117,15 +117,15 @@ class RGBAImage(Image): ) name: str = Field(...) - resolution: Optional[np.float32] = Field( + resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") array: Optional[ Union[ - NDArray[Shape["* x, * y"], np.number], - NDArray[Shape["* x, * y, 3 r_g_b"], np.number], - NDArray[Shape["* x, * y, 4 r_g_b_a"], np.number], + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r_g_b"], float], + NDArray[Shape["* x, * y, 4 r_g_b_a"], float], ] ] = Field(None) @@ -141,13 +141,12 @@ class ImageSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -169,12 +168,12 @@ class ImageSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -205,7 +204,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[np.int32] = Field( + starting_frame: Optional[int] = Field( None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. 
The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) @@ -225,13 +224,12 @@ class ImageMaskSeries(ImageSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -253,12 +251,12 @@ class ImageMaskSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -286,24 +284,23 @@ class OpticalSeries(ImageSeries): ) name: str = Field(...) - distance: Optional[np.float32] = Field( + distance: Optional[float] = Field( None, description="""Distance from camera/monitor to target/eye.""" ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, 3 r_g_b"], np.number], + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, 3 r_g_b"], float], ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). 
Must also specify frame of reference.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -325,12 +322,12 @@ class OpticalSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -358,7 +355,7 @@ class IndexSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.uint32] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the image (using zero-indexing) in the linked Images object.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -372,12 +369,12 @@ class IndexSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py index 0cfd65b..7d8bcb2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py @@ -129,12 +129,12 @@ class AbstractFeatureSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -169,8 +169,8 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): ) array: Optional[ Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_features"], np.number], + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], ] ] = Field(None) @@ -199,12 +199,12 @@ class AnnotationSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -232,7 +232,7 @@ class IntervalSeries(TimeSeries): ) name: str = Field(...) - data: NDArray[Shape["* num_times"], np.int8] = Field( + data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -246,12 +246,12 @@ class IntervalSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -303,12 +303,12 @@ class DecompositionSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -341,7 +341,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], np.number]] = Field( + array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -377,7 +377,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], np.float32] = Field( + band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( ..., description="""Low and high limit of each band in Hz. 
If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -391,12 +391,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], np.float32] = Field( + band_mean: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], np.float32] = Field( + band_stdev: NDArray[Shape["* num_bands"], float] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -445,7 +445,7 @@ class Units(DynamicTable): "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], np.float64]] = Field( + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( None, description="""Observation intervals for each unit.""", json_schema_extra={ @@ -478,17 +478,17 @@ class Units(DynamicTable): ) waveform_mean: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") waveform_sd: Optional[ Union[ - NDArray[Shape["* num_units, * num_samples"], np.float32], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], np.float32], + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], np.number]] = Field( + waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field( None, description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. 
The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", json_schema_extra={ @@ -541,7 +541,7 @@ class UnitsSpikeTimes(VectorData): "linkml_meta": {"equals_string": "spike_times", "ifabsent": "string(spike_times)"} }, ) - resolution: Optional[np.float64] = Field( + resolution: Optional[float] = Field( None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py index 46d16a7..4e0da1a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py @@ -77,8 +77,7 @@ class OptogeneticSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_rois"], np.number], + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] ] = Field( ..., description="""Applied power for optogenetic stimulus, in watts. Shape can be 1D or 2D. 2D data is meant to be used in an extension of OptogeneticSeries that defines what the second dimension represents.""", @@ -92,12 +91,12 @@ class OptogeneticSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -126,7 +125,7 @@ class OptogeneticStimulusSite(NWBContainer): name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: np.float32 = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py index bfd5c4e..789a327 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py @@ -114,31 +114,28 @@ class OnePhotonSeries(ImageSeries): ) name: str = Field(...) - pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""") - scan_line_rate: Optional[np.float32] = Field( + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field( None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", ) - exposure_time: Optional[np.float32] = Field( + exposure_time: Optional[float] = Field( None, description="""Exposure time of the sample; often the inverse of the frequency.""" ) - binning: Optional[np.uint8] = Field( + binning: Optional[int] = Field( None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.""" ) - power: Optional[np.float32] = Field( - None, description="""Power of the excitation in mW, if known.""" - ) - intensity: Optional[np.float32] = Field( + power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""") + intensity: Optional[float] = Field( None, description="""Intensity of the excitation in mW/mm^2, if known.""" ) data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -160,12 +157,12 @@ class OnePhotonSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -193,25 +190,23 @@ class TwoPhotonSeries(ImageSeries): ) name: str = Field(...) 
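
A minimal sketch of the dtype loosening these hunks apply, assuming numpydantic treats Python builtins as generic dtypes so that any floating precision validates against `float` (the apparent motivation for replacing `np.float32`/`np.float64`/`np.uint8`); `TimestampHolder` is a hypothetical stand-in, not a generated model:

```python
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel

class TimestampHolder(BaseModel):  # hypothetical stand-in for a generated model
    timestamps: NDArray[Shape["* num_times"], float]

TimestampHolder(timestamps=np.arange(5, dtype=np.float64))  # validates
TimestampHolder(timestamps=np.arange(5, dtype=np.float32))  # should also validate
```
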
- pmt_gain: Optional[np.float32] = Field(None, description="""Photomultiplier gain.""") - scan_line_rate: Optional[np.float32] = Field( + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field( None, description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", ) field_of_view: Optional[ Union[ - NDArray[Shape["2 width_height"], np.float32], - NDArray[Shape["3 width_height_depth"], np.float32], + NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") data: Union[ - NDArray[Shape["* frame, * x, * y"], np.number], - NDArray[Shape["* frame, * x, * y, * z"], np.number], + NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( ..., description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", ) - dimension: Optional[NDArray[Shape["* rank"], np.int32]] = Field( + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( None, description="""Number of pixels on x, y, (and z) axes.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "rank"}]}}}, @@ -233,12 +228,12 @@ class TwoPhotonSeries(ImageSeries): None, description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -267,8 +262,7 @@ class RoiResponseSeries(TimeSeries): name: str = Field(...) data: Union[ - NDArray[Shape["* num_times"], np.number], - NDArray[Shape["* num_times, * num_rois"], np.number], + NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_rois"], float] ] = Field(..., description="""Signals from ROIs.""") rois: Named[DynamicTableRegion] = Field( ..., @@ -286,12 +280,12 @@ class RoiResponseSeries(TimeSeries): None, description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", ) - timestamps: Optional[NDArray[Shape["* num_times"], np.float64]] = Field( + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( None, description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - control: Optional[NDArray[Shape["* num_times"], np.uint8]] = Field( + control: Optional[NDArray[Shape["* num_times"], int]] = Field( None, description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, @@ -451,9 +445,9 @@ class PlaneSegmentationPixelMask(VectorData): "linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"} }, ) - x: Optional[np.uint32] = Field(None, description="""Pixel x-coordinate.""") - y: Optional[np.uint32] = Field(None, description="""Pixel y-coordinate.""") - weight: Optional[np.float32] = Field(None, description="""Weight of the pixel.""") + x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") + y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the pixel.""") description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -480,10 +474,10 @@ class PlaneSegmentationVoxelMask(VectorData): "linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"} }, ) - x: Optional[np.uint32] = Field(None, description="""Voxel x-coordinate.""") - y: Optional[np.uint32] = Field(None, description="""Voxel y-coordinate.""") - z: Optional[np.uint32] = Field(None, description="""Voxel z-coordinate.""") - weight: Optional[np.float32] = Field(None, description="""Weight of the voxel.""") + x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") + y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") + z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the voxel.""") description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -523,9 +517,7 @@ class OpticalChannel(NWBContainer): name: str = Field(...) 
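
A similar sketch for the 1D-or-2D shape unions above (e.g. `RoiResponseSeries.data`), under the same assumptions; `RoiData` is a hypothetical stand-in:

```python
from typing import Union

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel

class RoiData(BaseModel):  # hypothetical stand-in
    data: Union[
        NDArray[Shape["* num_times"], float],
        NDArray[Shape["* num_times, * num_rois"], float],
    ]

RoiData(data=np.zeros(10))       # 1D: a single trace
RoiData(data=np.zeros((10, 3)))  # 2D: num_times x num_rois
```
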
description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: np.float32 = Field( - ..., description="""Emission wavelength for channel, in nm.""" - ) + emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") class MotionCorrection(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py index f65ed6c..402cc40 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py @@ -127,17 +127,15 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -161,17 +159,15 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -195,17 +191,15 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -229,17 +223,15 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -263,24 +255,20 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - focal_depth: Optional[np.float32] = Field( - None, description="""Focal depth offset, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -301,14 +289,12 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.float32]] = Field( + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -332,21 +318,19 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[np.int32] = Field( + bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[np.int32] = Field( + dimension: Optional[int] = Field( None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[np.float32] = Field( - None, description="""Size of viewing area, in meters.""" - ) + field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], np.uint16]] = Field( + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index c6e8f35..b212cef 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -66,7 +66,7 @@ class VectorDataMixin(BaseModel): else: return self.array[item] - def __setitem__(self, key, value) -> None: + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing self._index[key] = value @@ -83,7 +83,7 @@ class VectorIndexMixin(BaseModel): array: Optional[NDArray] = None target: Optional["VectorData"] = None - def _getitem_helper(self, arg: int): + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ @@ -104,7 +104,7 @@ class VectorIndexMixin(BaseModel): else: raise NotImplementedError("DynamicTableRange not supported yet") - def __setitem__(self, key, value) -> None: + def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: # VectorIndex is the thing that knows how to do the slicing self._index[key] = value @@ -121,7 +121,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] + __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -132,15 +132,15 @@ class DynamicTableMixin(BaseModel): colnames: List[str] = Field(default_factory=list) @property - def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + def _columns(self) -> Dict[str, Union[list, 
"NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: + def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: return [getattr(self, k) for i, k in enumerate(self.colnames)] @overload - def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... + def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload def __getitem__(self, item: int) -> DataFrame: ... @@ -153,7 +153,7 @@ class DynamicTableMixin(BaseModel): DataFrame, list, "NDArray", - "VectorData", + "VectorDataMixin", ]: ... @overload @@ -231,7 +231,7 @@ class DynamicTableMixin(BaseModel): @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]): + def create_colnames(cls, model: Dict[str, Any]) -> None: """ Construct colnames from arguments. @@ -240,19 +240,17 @@ class DynamicTableMixin(BaseModel): """ if "colnames" not in model: colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") ] model["colnames"] = colnames else: # add any columns not explicitly given an order at the end colnames = [ k - for k in model.keys() + for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"].keys() + and k not in model["colnames"] ] model["colnames"].extend(colnames) return model @@ -269,13 +267,11 @@ class DynamicTableMixin(BaseModel): for field_name in self.model_fields_set: # implicit name-based index field = getattr(self, field_name) - if isinstance(field, VectorIndex): - if field_name == f"{key}_index": - idx = field - break - elif field.target is col: - idx = field - break + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break if idx is not None: col._index = idx idx.target = col diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 44209ba..9d1bdb6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -66,7 +66,7 @@ class VectorDataMixin(BaseModel): else: return self.array[item] - def __setitem__(self, key, value) -> None: + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing self._index[key] = value @@ -83,7 +83,7 @@ class VectorIndexMixin(BaseModel): array: Optional[NDArray] = None target: Optional["VectorData"] = None - def _getitem_helper(self, arg: int): + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ @@ -104,7 +104,7 @@ class VectorIndexMixin(BaseModel): else: raise NotImplementedError("DynamicTableRange not supported yet") - def __setitem__(self, key, value) -> None: + def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: # VectorIndex is the thing that knows how to do the slicing self._index[key] = value @@ -121,7 +121,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", 
"VectorData"]] + __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -132,15 +132,15 @@ class DynamicTableMixin(BaseModel): colnames: List[str] = Field(default_factory=list) @property - def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: + def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: return [getattr(self, k) for i, k in enumerate(self.colnames)] @overload - def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... + def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload def __getitem__(self, item: int) -> DataFrame: ... @@ -153,7 +153,7 @@ class DynamicTableMixin(BaseModel): DataFrame, list, "NDArray", - "VectorData", + "VectorDataMixin", ]: ... @overload @@ -231,7 +231,7 @@ class DynamicTableMixin(BaseModel): @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]): + def create_colnames(cls, model: Dict[str, Any]) -> None: """ Construct colnames from arguments. @@ -240,19 +240,17 @@ class DynamicTableMixin(BaseModel): """ if "colnames" not in model: colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") ] model["colnames"] = colnames else: # add any columns not explicitly given an order at the end colnames = [ k - for k in model.keys() + for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"].keys() + and k not in model["colnames"] ] model["colnames"].extend(colnames) return model @@ -269,13 +267,11 @@ class DynamicTableMixin(BaseModel): for field_name in self.model_fields_set: # implicit name-based index field = getattr(self, field_name) - if isinstance(field, VectorIndex): - if field_name == f"{key}_index": - idx = field - break - elif field.target is col: - idx = field - break + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break if idx is not None: col._index = idx idx.target = col diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index 703ff6c..dbca48c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -66,7 +66,7 @@ class VectorDataMixin(BaseModel): else: return self.array[item] - def __setitem__(self, key, value) -> None: + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing self._index[key] = value @@ -83,7 +83,7 @@ class VectorIndexMixin(BaseModel): array: Optional[NDArray] = None target: Optional["VectorData"] = None - def _getitem_helper(self, arg: int): + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ @@ -104,7 +104,7 @@ class VectorIndexMixin(BaseModel): else: raise NotImplementedError("DynamicTableRange not 
supported yet") - def __setitem__(self, key, value) -> None: + def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: # VectorIndex is the thing that knows how to do the slicing self._index[key] = value @@ -121,7 +121,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] + __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -132,15 +132,15 @@ class DynamicTableMixin(BaseModel): colnames: List[str] = Field(default_factory=list) @property - def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: + def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: return [getattr(self, k) for i, k in enumerate(self.colnames)] @overload - def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... + def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload def __getitem__(self, item: int) -> DataFrame: ... @@ -153,7 +153,7 @@ class DynamicTableMixin(BaseModel): DataFrame, list, "NDArray", - "VectorData", + "VectorDataMixin", ]: ... @overload @@ -231,7 +231,7 @@ class DynamicTableMixin(BaseModel): @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]): + def create_colnames(cls, model: Dict[str, Any]) -> None: """ Construct colnames from arguments. @@ -240,19 +240,17 @@ class DynamicTableMixin(BaseModel): """ if "colnames" not in model: colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") ] model["colnames"] = colnames else: # add any columns not explicitly given an order at the end colnames = [ k - for k in model.keys() + for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"].keys() + and k not in model["colnames"] ] model["colnames"].extend(colnames) return model @@ -269,13 +267,11 @@ class DynamicTableMixin(BaseModel): for field_name in self.model_fields_set: # implicit name-based index field = getattr(self, field_name) - if isinstance(field, VectorIndex): - if field_name == f"{key}_index": - idx = field - break - elif field.target is col: - idx = field - break + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break if idx is not None: col._index = idx idx.target = col diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py deleted file mode 100644 index 1d657d9..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py +++ /dev/null @@ -1,88 +0,0 @@ -from __future__ import annotations -from datetime import datetime, 
date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np - -metamodel_version = "None" -version = "1.2.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.base/", - "id": "hdmf-common.base", - "imports": ["hdmf-common.nwb.language"], - "name": "hdmf-common.base", - } -) - - -class Data(ConfiguredBaseModel): - """ - An abstract data type for a dataset. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - name: str = Field(...) - - -class Container(ConfiguredBaseModel): - """ - An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - name: str = Field(...) 
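
The `create_colnames` validator changed in the v1_1_x table hunks above infers column order from dict insertion order; a standalone sketch of that rule, with illustrative values:

```python
# Non-structural keys that do not end in "_index" become columns, in
# insertion order (same comprehension as the "+" lines above).
NON_COLUMN_FIELDS = ("name", "colnames", "description")
model = {
    "name": "units",
    "description": "spike sorting results",
    "spike_times": [0.1, 0.2, 0.3],
    "spike_times_index": [3],
}
colnames = [k for k in model if k not in NON_COLUMN_FIELDS and not k.endswith("_index")]
assert colnames == ["spike_times"]
```
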
- - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Data.model_rebuild() -Container.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py deleted file mode 100644 index 31afdb0..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py +++ /dev/null @@ -1,125 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np - -metamodel_version = "None" -version = "1.2.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.sparse/", - "id": "hdmf-common.sparse", - "imports": ["hdmf-common.nwb.language"], - "name": "hdmf-common.sparse", - } -) - - -class CSRMatrix(ConfiguredBaseModel): - """ - a compressed sparse row matrix - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.sparse", "tree_root": True} - ) - - name: str = Field(...) 
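
The `resolve_targets` refactor in the v1_1_x table hunks above collapses the nested index check into a single condition; a self-contained sketch with stand-in classes (names illustrative):

```python
class VectorIndex:  # stand-in for the generated class
    def __init__(self, target=None):
        self.target = target

def is_index_for(field_name, field, key, col):
    # the single condition that replaces the nested ifs in resolve_targets:
    # match by the `{key}_index` naming convention or an explicit backreference
    return isinstance(field, VectorIndex) and (
        field_name == f"{key}_index" or field.target is col
    )

col = object()
assert is_index_for("spike_times_index", VectorIndex(), "spike_times", col)
assert is_index_for("other", VectorIndex(target=col), "spike_times", col)
assert not is_index_for("other", VectorIndex(), "spike_times", col)
```
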
- shape: Optional[int] = Field(None, description="""the shape of this sparse matrix""") - indices: CSRMatrixIndices = Field(..., description="""column indices""") - indptr: CSRMatrixIndptr = Field(..., description="""index pointer""") - data: CSRMatrixData = Field(..., description="""values in the matrix""") - - -class CSRMatrixIndices(ConfiguredBaseModel): - """ - column indices - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) - - name: Literal["indices"] = Field( - "indices", - json_schema_extra={ - "linkml_meta": {"equals_string": "indices", "ifabsent": "string(indices)"} - }, - ) - - -class CSRMatrixIndptr(ConfiguredBaseModel): - """ - index pointer - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) - - name: Literal["indptr"] = Field( - "indptr", - json_schema_extra={ - "linkml_meta": {"equals_string": "indptr", "ifabsent": "string(indptr)"} - }, - ) - - -class CSRMatrixData(ConfiguredBaseModel): - """ - values in the matrix - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) - - name: Literal["data"] = Field( - "data", - json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -CSRMatrix.model_rebuild() -CSRMatrixIndices.model_rebuild() -CSRMatrixIndptr.model_rebuild() -CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py deleted file mode 100644 index 6ded400..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ /dev/null @@ -1,449 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -import numpy as np -from ...hdmf_common.v1_2_0.hdmf_common_base import Data, Container -from pandas import DataFrame -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple -from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator - -metamodel_version = "None" -version = "1.2.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -NUMPYDANTIC_VERSION = "1.2.1" - - -class VectorDataMixin(BaseModel): - """ - Mixin class to give VectorData indexing abilities - """ - - _index: Optional["VectorIndex"] = None - - # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None - - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, 
slice], ...]]) -> Any: - if self._index: - # Following hdmf, VectorIndex is the thing that knows how to do the slicing - return self._index[item] - else: - return self.array[item] - - def __setitem__(self, key, value) -> None: - if self._index: - # Following hdmf, VectorIndex is the thing that knows how to do the slicing - self._index[key] = value - else: - self.array[key] = value - - -class VectorIndexMixin(BaseModel): - """ - Mixin class to give VectorIndex indexing abilities - """ - - # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None - target: Optional["VectorData"] = None - - def _getitem_helper(self, arg: int): - """ - Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` - """ - - start = 0 if arg == 0 else self.array[arg - 1] - end = self.array[arg] - return self.target.array[slice(start, end)] - - def __getitem__(self, item: Union[int, slice]) -> Any: - if self.target is None: - return self.array[item] - elif type(self.target).__name__ == "VectorData": - if isinstance(item, int): - return self._getitem_helper(item) - else: - idx = range(*item.indices(len(self.array))) - return [self._getitem_helper(i) for i in idx] - else: - raise NotImplementedError("DynamicTableRange not supported yet") - - def __setitem__(self, key, value) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value - else: - self.array[key] = value - - -class DynamicTableMixin(BaseModel): - """ - Mixin to make DynamicTable subclasses behave like tables/dataframes - - Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable` - but simplifying along the way :) - """ - - model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] - NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( - "name", - "colnames", - "description", - ) - - # overridden by subclass but implemented here for testing and typechecking purposes :) - colnames: List[str] = Field(default_factory=list) - - @property - def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - - @overload - def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... - - @overload - def __getitem__(self, item: int) -> DataFrame: ... - - @overload - def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... - - @overload - def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, - list, - "NDArray", - "VectorData", - ]: ... - - @overload - def __getitem__(self, item: slice) -> DataFrame: ... - - def __getitem__( - self, - item: Union[ - str, - int, - slice, - Tuple[int, Union[int, str]], - Tuple[Union[int, slice], ...], - ], - ) -> Any: - """ - Get an item from the table - - If item is... - - - ``str`` : get the column with this name - - ``int`` : get the row at this index - - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column - - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') - gets the 0th row from ``colname`` - - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. 
- returns as a :class:`pandas.DataFrame` - """ - if isinstance(item, str): - return self._columns[item] - if isinstance(item, (int, slice)): - return DataFrame.from_dict(self._slice_range(item)) - elif isinstance(item, tuple): - if len(item) != 2: - raise ValueError( - "DynamicTables are 2-dimensional, can't index with more than 2 indices like" - f" {item}" - ) - - # all other cases are tuples of (rows, cols) - rows, cols = item - if isinstance(cols, (int, slice)): - cols = self.colnames[cols] - data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) - else: - raise ValueError(f"Unsure how to get item with key {item}") - - def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None - ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - if cols is None: - cols = self.colnames - elif isinstance(cols, str): - cols = [cols] - - data = {k: self._columns[k][rows] for k in cols} - return data - - def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") - - def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): - """ - Add a column, appending it to ``colnames`` - """ - # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): - return super().__setattr__(key, value) - - if key not in self.model_fields_set and not key.endswith("_index"): - self.colnames.append(key) - - return super().__setattr__(key, value) - - @model_validator(mode="before") - @classmethod - def create_colnames(cls, model: Dict[str, Any]): - """ - Construct colnames from arguments. - - the model dict is ordered after python3.6, so we can use that minus - anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order - """ - if "colnames" not in model: - colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end - colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS - and not k.endswith("_index") - and k not in model["colnames"].keys() - ] - model["colnames"].extend(colnames) - return model - - @model_validator(mode="after") - def resolve_targets(self) -> "DynamicTableMixin": - """ - Ensure that any implicitly indexed columns are linked, and create backlinks - """ - for key, col in self._columns.items(): - if isinstance(col, VectorData): - # find an index - idx = None - for field_name in self.model_fields_set: - # implicit name-based index - field = getattr(self, field_name) - if isinstance(field, VectorIndex): - if field_name == f"{key}_index": - idx = field - break - elif field.target is col: - idx = field - break - if idx is not None: - col._index = idx - idx.target = col - return self - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.table/", - "id": "hdmf-common.table", - "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], - "name": "hdmf-common.table", - } -) - - -class VectorData(VectorDataMixin): - """ - An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. 
VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class VectorIndex(VectorIndexMixin): - """ - Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - target: Optional[VectorData] = Field( - None, description="""Reference to the target dataset that this index applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class ElementIdentifiers(Data): - """ - A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field( - "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} - ) - - -class DynamicTableRegion(VectorData): - """ - DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) 
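
A worked example of the ragged layout these VectorData/VectorIndex docstrings describe, mirroring the arithmetic in `VectorIndexMixin._getitem_helper` (values illustrative):

```python
import numpy as np

data = np.array([10, 11, 20, 21, 22, 30])  # flat target VectorData
index = np.array([2, 5, 6])                # exclusive end offset of each row

def row(i):
    # start at 0 for the first row, else at the previous offset
    start = 0 if i == 0 else index[i - 1]
    return data[start:index[i]]

assert row(0).tolist() == [10, 11]
assert row(1).tolist() == [20, 21, 22]
assert row(2).tolist() == [30]
```
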
- table: Optional[DynamicTable] = Field( - None, description="""Reference to the DynamicTable object that this region applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class VocabData(VectorData): - """ - Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - vocabulary: Optional[str] = Field( - None, description="""The available items in the controlled vocabulary.""" - ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class DynamicTable(DynamicTableMixin): - """ - A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - colnames: Optional[str] = Field( - None, - description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", - ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) - id: NDArray[Shape["* num_rows"], int] = Field( - ..., - description="""Array of unique identifiers for the rows of this dynamic table.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, - ) - vector_data: Optional[List[VectorData]] = Field( - None, description="""Vector columns, including index columns, of this dynamic table.""" - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -VectorData.model_rebuild() -VectorIndex.model_rebuild() -ElementIdentifiers.model_rebuild() -DynamicTableRegion.model_rebuild() -VocabData.model_rebuild() -DynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py deleted file mode 100644 index 62d22cb..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py +++ /dev/null @@ -1,83 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_2_0.hdmf_common_sparse import ( - CSRMatrix, - CSRMatrixIndices, - CSRMatrixIndptr, - CSRMatrixData, -) -from ...hdmf_common.v1_2_0.hdmf_common_table import ( - VectorData, - VectorIndex, - ElementIdentifiers, - DynamicTableRegion, - VocabData, - DynamicTable, -) -from ...hdmf_common.v1_2_0.hdmf_common_base import Data, Container - -metamodel_version = "None" -version = "1.2.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": True}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common/", - "description": "Common data structures provided by HDMF", - "id": "hdmf-common", - "imports": [ - "hdmf-common.base", - "hdmf-common.table", - "hdmf-common.sparse", - "hdmf-common.nwb.language", - ], - "name": "hdmf-common", - } -) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff 
--git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py deleted file mode 100644 index c891ed8..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py +++ /dev/null @@ -1,104 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np - -metamodel_version = "None" -version = "1.2.1" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.base/", - "id": "hdmf-common.base", - "imports": ["hdmf-common.nwb.language"], - "name": "hdmf-common.base", - } -) - - -class Data(ConfiguredBaseModel): - """ - An abstract data type for a dataset. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - name: str = Field(...) - - -class Container(ConfiguredBaseModel): - """ - An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - name: str = Field(...) - - -class SimpleMultiContainer(Container): - """ - A simple Container for holding onto multiple containers - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - children: Optional[List[Container]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} - ) - name: str = Field(...) 
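The `Data` / `Container` / `SimpleMultiContainer` trio above reappears unchanged in each hdmf-common version deleted below. A minimal stand-alone sketch of the containment pattern (trimmed re-declarations for illustration, not the generated classes themselves):

```python
# A named Container that can hold any number of other Containers,
# mirroring the SimpleMultiContainer pattern above.
from typing import List, Optional

from pydantic import BaseModel, Field


class Container(BaseModel):
    name: str


class SimpleMultiContainer(Container):
    children: Optional[List[Container]] = Field(None)


holder = SimpleMultiContainer(
    name="holder", children=[Container(name="a"), Container(name="b")]
)
print([child.name for child in holder.children])  # ['a', 'b']
```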
- - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Data.model_rebuild() -Container.model_rebuild() -SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py deleted file mode 100644 index 9e2e7ce..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py +++ /dev/null @@ -1,126 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_2_1.hdmf_common_base import Container - -metamodel_version = "None" -version = "1.2.1" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.sparse/", - "id": "hdmf-common.sparse", - "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], - "name": "hdmf-common.sparse", - } -) - - -class CSRMatrix(Container): - """ - a compressed sparse row matrix - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.sparse", "tree_root": True} - ) - - name: str = Field(...) 
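For orientation, `CSRMatrix` stores the standard compressed-sparse-row layout: the column indices for row `i` sit in `indices[indptr[i]:indptr[i+1]]` and the matching values in `data[indptr[i]:indptr[i+1]]` (the v1.3.0+ docstrings below spell this out). A plain-numpy sketch with made-up values:

```python
# Densify one row of a 3x3 CSR matrix [[1, 0, 2], [0, 0, 3], [4, 5, 6]].
import numpy as np

indptr = np.array([0, 2, 3, 6])   # row i spans indptr[i]:indptr[i + 1]
indices = np.array([0, 2, 2, 0, 1, 2])
data = np.array([1, 2, 3, 4, 5, 6])


def row(i: int) -> np.ndarray:
    out = np.zeros(3, dtype=data.dtype)
    span = slice(indptr[i], indptr[i + 1])
    out[indices[span]] = data[span]
    return out


print(row(0))  # [1 0 2]
print(row(2))  # [4 5 6]
```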
- shape: Optional[int] = Field(None, description="""the shape of this sparse matrix""") - indices: CSRMatrixIndices = Field(..., description="""column indices""") - indptr: CSRMatrixIndptr = Field(..., description="""index pointer""") - data: CSRMatrixData = Field(..., description="""values in the matrix""") - - -class CSRMatrixIndices(ConfiguredBaseModel): - """ - column indices - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) - - name: Literal["indices"] = Field( - "indices", - json_schema_extra={ - "linkml_meta": {"equals_string": "indices", "ifabsent": "string(indices)"} - }, - ) - - -class CSRMatrixIndptr(ConfiguredBaseModel): - """ - index pointer - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) - - name: Literal["indptr"] = Field( - "indptr", - json_schema_extra={ - "linkml_meta": {"equals_string": "indptr", "ifabsent": "string(indptr)"} - }, - ) - - -class CSRMatrixData(ConfiguredBaseModel): - """ - values in the matrix - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) - - name: Literal["data"] = Field( - "data", - json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -CSRMatrix.model_rebuild() -CSRMatrixIndices.model_rebuild() -CSRMatrixIndptr.model_rebuild() -CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py deleted file mode 100644 index fd4377f..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ /dev/null @@ -1,449 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -import numpy as np -from ...hdmf_common.v1_2_1.hdmf_common_base import Data, Container -from pandas import DataFrame -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple -from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator - -metamodel_version = "None" -version = "1.2.1" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -NUMPYDANTIC_VERSION = "1.2.1" - - -class VectorDataMixin(BaseModel): - """ - Mixin class to give VectorData indexing abilities - """ - - _index: Optional["VectorIndex"] = None - - # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None - - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, 
slice], ...]]) -> Any: - if self._index: - # Following hdmf, VectorIndex is the thing that knows how to do the slicing - return self._index[item] - else: - return self.array[item] - - def __setitem__(self, key, value) -> None: - if self._index: - # Following hdmf, VectorIndex is the thing that knows how to do the slicing - self._index[key] = value - else: - self.array[key] = value - - -class VectorIndexMixin(BaseModel): - """ - Mixin class to give VectorIndex indexing abilities - """ - - # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None - target: Optional["VectorData"] = None - - def _getitem_helper(self, arg: int): - """ - Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` - """ - - start = 0 if arg == 0 else self.array[arg - 1] - end = self.array[arg] - return self.target.array[slice(start, end)] - - def __getitem__(self, item: Union[int, slice]) -> Any: - if self.target is None: - return self.array[item] - elif type(self.target).__name__ == "VectorData": - if isinstance(item, int): - return self._getitem_helper(item) - else: - idx = range(*item.indices(len(self.array))) - return [self._getitem_helper(i) for i in idx] - else: - raise NotImplementedError("DynamicTableRange not supported yet") - - def __setitem__(self, key, value) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value - else: - self.array[key] = value - - -class DynamicTableMixin(BaseModel): - """ - Mixin to make DynamicTable subclasses behave like tables/dataframes - - Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable` - but simplifying along the way :) - """ - - model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] - NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( - "name", - "colnames", - "description", - ) - - # overridden by subclass but implemented here for testing and typechecking purposes :) - colnames: List[str] = Field(default_factory=list) - - @property - def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - - @overload - def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... - - @overload - def __getitem__(self, item: int) -> DataFrame: ... - - @overload - def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... - - @overload - def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, - list, - "NDArray", - "VectorData", - ]: ... - - @overload - def __getitem__(self, item: slice) -> DataFrame: ... - - def __getitem__( - self, - item: Union[ - str, - int, - slice, - Tuple[int, Union[int, str]], - Tuple[Union[int, slice], ...], - ], - ) -> Any: - """ - Get an item from the table - - If item is... - - - ``str`` : get the column with this name - - ``int`` : get the row at this index - - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column - - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') - gets the 0th row from ``colname`` - - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. 
- returns as a :class:`pandas.DataFrame` - """ - if isinstance(item, str): - return self._columns[item] - if isinstance(item, (int, slice)): - return DataFrame.from_dict(self._slice_range(item)) - elif isinstance(item, tuple): - if len(item) != 2: - raise ValueError( - "DynamicTables are 2-dimensional, can't index with more than 2 indices like" - f" {item}" - ) - - # all other cases are tuples of (rows, cols) - rows, cols = item - if isinstance(cols, (int, slice)): - cols = self.colnames[cols] - data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) - else: - raise ValueError(f"Unsure how to get item with key {item}") - - def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None - ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - if cols is None: - cols = self.colnames - elif isinstance(cols, str): - cols = [cols] - - data = {k: self._columns[k][rows] for k in cols} - return data - - def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") - - def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): - """ - Add a column, appending it to ``colnames`` - """ - # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): - return super().__setattr__(key, value) - - if key not in self.model_fields_set and not key.endswith("_index"): - self.colnames.append(key) - - return super().__setattr__(key, value) - - @model_validator(mode="before") - @classmethod - def create_colnames(cls, model: Dict[str, Any]): - """ - Construct colnames from arguments. - - the model dict is ordered after python3.6, so we can use that minus - anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order - """ - if "colnames" not in model: - colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end - colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS - and not k.endswith("_index") - and k not in model["colnames"].keys() - ] - model["colnames"].extend(colnames) - return model - - @model_validator(mode="after") - def resolve_targets(self) -> "DynamicTableMixin": - """ - Ensure that any implicitly indexed columns are linked, and create backlinks - """ - for key, col in self._columns.items(): - if isinstance(col, VectorData): - # find an index - idx = None - for field_name in self.model_fields_set: - # implicit name-based index - field = getattr(self, field_name) - if isinstance(field, VectorIndex): - if field_name == f"{key}_index": - idx = field - break - elif field.target is col: - idx = field - break - if idx is not None: - col._index = idx - idx.target = col - return self - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.table/", - "id": "hdmf-common.table", - "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], - "name": "hdmf-common.table", - } -) - - -class VectorData(VectorDataMixin): - """ - An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. 
VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class VectorIndex(VectorIndexMixin): - """ - Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - target: Optional[VectorData] = Field( - None, description="""Reference to the target dataset that this index applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class ElementIdentifiers(Data): - """ - A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field( - "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} - ) - - -class DynamicTableRegion(VectorData): - """ - DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) 
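The ragged-array rule in the `VectorData`/`VectorIndex` docstrings above (row `i` spans `VectorData[VectorIndex[i-1]:VectorIndex[i]]`) is what `_getitem_helper` in `VectorIndexMixin` computes. Restated with plain numpy and made-up values:

```python
# Two ragged rows, [1, 2] and [3, 4, 5], packed into one flat array.
import numpy as np

target = np.array([1, 2, 3, 4, 5])  # concatenated values (the VectorData)
index = np.array([2, 5])            # end offset of each row (the VectorIndex)


def get_row(i: int) -> np.ndarray:
    start = 0 if i == 0 else index[i - 1]
    return target[start:index[i]]


print(get_row(0))  # [1 2]
print(get_row(1))  # [3 4 5]
```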
- table: Optional[DynamicTable] = Field( - None, description="""Reference to the DynamicTable object that this region applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class VocabData(VectorData): - """ - Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - vocabulary: Optional[str] = Field( - None, description="""The available items in the controlled vocabulary.""" - ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class DynamicTable(DynamicTableMixin): - """ - A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - colnames: Optional[str] = Field( - None, - description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", - ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) - id: NDArray[Shape["* num_rows"], int] = Field( - ..., - description="""Array of unique identifiers for the rows of this dynamic table.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, - ) - vector_data: Optional[List[VectorData]] = Field( - None, description="""Vector columns, including index columns, of this dynamic table.""" - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -VectorData.model_rebuild() -VectorIndex.model_rebuild() -ElementIdentifiers.model_rebuild() -DynamicTableRegion.model_rebuild() -VocabData.model_rebuild() -DynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py deleted file mode 100644 index 55f5dc6..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py +++ /dev/null @@ -1,83 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_2_1.hdmf_common_sparse import ( - CSRMatrix, - CSRMatrixIndices, - CSRMatrixIndptr, - CSRMatrixData, -) -from ...hdmf_common.v1_2_1.hdmf_common_base import Data, Container, SimpleMultiContainer -from ...hdmf_common.v1_2_1.hdmf_common_table import ( - VectorData, - VectorIndex, - ElementIdentifiers, - DynamicTableRegion, - VocabData, - DynamicTable, -) - -metamodel_version = "None" -version = "1.2.1" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": True}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common/", - "description": "Common data structures provided by HDMF", - "id": "hdmf-common", - "imports": [ - "hdmf-common.base", - "hdmf-common.table", - "hdmf-common.sparse", - "hdmf-common.nwb.language", - ], - "name": "hdmf-common", - } -) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py +++ /dev/null 
@@ -1 +0,0 @@ - diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py deleted file mode 100644 index 63bbcf2..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py +++ /dev/null @@ -1,104 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np - -metamodel_version = "None" -version = "1.3.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.base/", - "id": "hdmf-common.base", - "imports": ["hdmf-common.nwb.language"], - "name": "hdmf-common.base", - } -) - - -class Data(ConfiguredBaseModel): - """ - An abstract data type for a dataset. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - name: str = Field(...) - - -class Container(ConfiguredBaseModel): - """ - An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - name: str = Field(...) - - -class SimpleMultiContainer(Container): - """ - A simple Container for holding onto multiple containers. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - children: Optional[List[Container]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} - ) - name: str = Field(...) 
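Each deleted module closes with a round of `model_rebuild()` calls like the block that follows. A sketch, with hypothetical `Table`/`Column` names, of why that step exists: under `from __future__ import annotations`, field annotations are strings that may name classes defined later in the module, and `model_rebuild()` resolves them once everything is in scope:

```python
from __future__ import annotations

from typing import List, Optional

from pydantic import BaseModel


class Table(BaseModel):
    # "Column" is just a string annotation here; the class does not exist yet
    columns: Optional[List[Column]] = None


class Column(BaseModel):
    name: str


# resolve the forward reference now that Column is defined
Table.model_rebuild()

print(Table(columns=[Column(name="id")]))
```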
- - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Data.model_rebuild() -Container.model_rebuild() -SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py deleted file mode 100644 index 81f3031..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py +++ /dev/null @@ -1,181 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_3_0.hdmf_common_base import Container, Data - -metamodel_version = "None" -version = "1.3.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.resources/", - "id": "hdmf-common.resources", - "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], - "name": "hdmf-common.resources", - } -) - - -class ExternalResources(Container): - """ - A set of four tables for tracking external resource references in a file. NOTE: this data type is in beta testing and is subject to change in a later version. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.resources", "tree_root": True} - ) - - name: str = Field(...) - keys: ExternalResourcesKeys = Field( - ..., - description="""A table for storing user terms that are used to refer to external resources.""", - ) - resources: ExternalResourcesResources = Field( - ..., description="""A table for mapping user terms (i.e., keys) to resource entities.""" - ) - objects: ExternalResourcesObjects = Field( - ..., - description="""A table for identifying which objects in a file contain references to external resources.""", - ) - object_keys: ExternalResourcesObjectKeys = Field( - ..., description="""A table for identifying which objects use which keys.""" - ) - - -class ExternalResourcesKeys(Data): - """ - A table for storing user terms that are used to refer to external resources. 
- """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"}) - - name: Literal["keys"] = Field( - "keys", - json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, - ) - key_name: str = Field( - ..., - description="""The user term that maps to one or more resources in the 'resources' table.""", - ) - - -class ExternalResourcesResources(Data): - """ - A table for mapping user terms (i.e., keys) to resource entities. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"}) - - name: Literal["resources"] = Field( - "resources", - json_schema_extra={ - "linkml_meta": {"equals_string": "resources", "ifabsent": "string(resources)"} - }, - ) - keytable_idx: np.uint64 = Field( - ..., description="""The index to the key in the 'keys' table.""" - ) - resource_name: str = Field( - ..., - description="""The name of the online resource (e.g., website, database) that has the entity.""", - ) - resource_id: str = Field( - ..., description="""The unique identifier for the resource entity at the resource.""" - ) - uri: str = Field( - ..., - description="""The URI for the resource entity this reference applies to. This can be an empty string.""", - ) - - -class ExternalResourcesObjects(Data): - """ - A table for identifying which objects in a file contain references to external resources. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"}) - - name: Literal["objects"] = Field( - "objects", - json_schema_extra={ - "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} - }, - ) - object_id: str = Field(..., description="""The UUID for the object.""") - field: str = Field( - ..., - description="""The field of the object. This can be an empty string if the object is a dataset and the field is the dataset values.""", - ) - - -class ExternalResourcesObjectKeys(Data): - """ - A table for identifying which objects use which keys. 
- """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"}) - - name: Literal["object_keys"] = Field( - "object_keys", - json_schema_extra={ - "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} - }, - ) - objecttable_idx: np.uint64 = Field( - ..., description="""The index to the 'objects' table for the object that holds the key.""" - ) - keytable_idx: np.uint64 = Field( - ..., description="""The index to the 'keys' table for the key.""" - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -ExternalResources.model_rebuild() -ExternalResourcesKeys.model_rebuild() -ExternalResourcesResources.model_rebuild() -ExternalResourcesObjects.model_rebuild() -ExternalResourcesObjectKeys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py deleted file mode 100644 index fe3047c..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py +++ /dev/null @@ -1,110 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_3_0.hdmf_common_base import Container -from numpydantic import NDArray, Shape - -metamodel_version = "None" -version = "1.3.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -NUMPYDANTIC_VERSION = "1.2.1" -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.sparse/", - "id": "hdmf-common.sparse", - "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], - "name": "hdmf-common.sparse", - } -) - - -class CSRMatrix(Container): - """ - A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.sparse", "tree_root": True} - ) - - name: str = Field(...) 
- shape: Optional[np.uint64] = Field( - None, description="""The shape (number of rows, number of columns) of this sparse matrix.""" - ) - indices: NDArray[Shape["* number_of_non_zero_values"], np.uint64] = Field( - ..., - description="""The column indices.""", - json_schema_extra={ - "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} - }, - ) - indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], np.uint64] = Field( - ..., - description="""The row index pointer.""", - json_schema_extra={ - "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} - }, - ) - data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") - - -class CSRMatrixData(ConfiguredBaseModel): - """ - The non-zero values in the matrix. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) - - name: Literal["data"] = Field( - "data", - json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -CSRMatrix.model_rebuild() -CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py deleted file mode 100644 index 7f3f848..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ /dev/null @@ -1,449 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -import numpy as np -from ...hdmf_common.v1_3_0.hdmf_common_base import Data, Container -from pandas import DataFrame -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple -from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator - -metamodel_version = "None" -version = "1.3.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -NUMPYDANTIC_VERSION = "1.2.1" - - -class VectorDataMixin(BaseModel): - """ - Mixin class to give VectorData indexing abilities - """ - - _index: Optional["VectorIndex"] = None - - # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None - - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - if self._index: - # Following hdmf, VectorIndex is the thing that knows how to do the slicing - return self._index[item] - else: - return self.array[item] - - def __setitem__(self, key, value) -> None: - if self._index: - # Following hdmf, VectorIndex is the thing 
that knows how to do the slicing - self._index[key] = value - else: - self.array[key] = value - - -class VectorIndexMixin(BaseModel): - """ - Mixin class to give VectorIndex indexing abilities - """ - - # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None - target: Optional["VectorData"] = None - - def _getitem_helper(self, arg: int): - """ - Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` - """ - - start = 0 if arg == 0 else self.array[arg - 1] - end = self.array[arg] - return self.target.array[slice(start, end)] - - def __getitem__(self, item: Union[int, slice]) -> Any: - if self.target is None: - return self.array[item] - elif type(self.target).__name__ == "VectorData": - if isinstance(item, int): - return self._getitem_helper(item) - else: - idx = range(*item.indices(len(self.array))) - return [self._getitem_helper(i) for i in idx] - else: - raise NotImplementedError("DynamicTableRange not supported yet") - - def __setitem__(self, key, value) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value - else: - self.array[key] = value - - -class DynamicTableMixin(BaseModel): - """ - Mixin to make DynamicTable subclasses behave like tables/dataframes - - Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable` - but simplifying along the way :) - """ - - model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] - NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( - "name", - "colnames", - "description", - ) - - # overridden by subclass but implemented here for testing and typechecking purposes :) - colnames: List[str] = Field(default_factory=list) - - @property - def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - - @overload - def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... - - @overload - def __getitem__(self, item: int) -> DataFrame: ... - - @overload - def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... - - @overload - def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, - list, - "NDArray", - "VectorData", - ]: ... - - @overload - def __getitem__(self, item: slice) -> DataFrame: ... - - def __getitem__( - self, - item: Union[ - str, - int, - slice, - Tuple[int, Union[int, str]], - Tuple[Union[int, slice], ...], - ], - ) -> Any: - """ - Get an item from the table - - If item is... - - - ``str`` : get the column with this name - - ``int`` : get the row at this index - - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column - - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') - gets the 0th row from ``colname`` - - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. 
- returns as a :class:`pandas.DataFrame` - """ - if isinstance(item, str): - return self._columns[item] - if isinstance(item, (int, slice)): - return DataFrame.from_dict(self._slice_range(item)) - elif isinstance(item, tuple): - if len(item) != 2: - raise ValueError( - "DynamicTables are 2-dimensional, can't index with more than 2 indices like" - f" {item}" - ) - - # all other cases are tuples of (rows, cols) - rows, cols = item - if isinstance(cols, (int, slice)): - cols = self.colnames[cols] - data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) - else: - raise ValueError(f"Unsure how to get item with key {item}") - - def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None - ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - if cols is None: - cols = self.colnames - elif isinstance(cols, str): - cols = [cols] - - data = {k: self._columns[k][rows] for k in cols} - return data - - def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") - - def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): - """ - Add a column, appending it to ``colnames`` - """ - # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): - return super().__setattr__(key, value) - - if key not in self.model_fields_set and not key.endswith("_index"): - self.colnames.append(key) - - return super().__setattr__(key, value) - - @model_validator(mode="before") - @classmethod - def create_colnames(cls, model: Dict[str, Any]): - """ - Construct colnames from arguments. - - the model dict is ordered after python3.6, so we can use that minus - anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order - """ - if "colnames" not in model: - colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end - colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS - and not k.endswith("_index") - and k not in model["colnames"].keys() - ] - model["colnames"].extend(colnames) - return model - - @model_validator(mode="after") - def resolve_targets(self) -> "DynamicTableMixin": - """ - Ensure that any implicitly indexed columns are linked, and create backlinks - """ - for key, col in self._columns.items(): - if isinstance(col, VectorData): - # find an index - idx = None - for field_name in self.model_fields_set: - # implicit name-based index - field = getattr(self, field_name) - if isinstance(field, VectorIndex): - if field_name == f"{key}_index": - idx = field - break - elif field.target is col: - idx = field - break - if idx is not None: - col._index = idx - idx.target = col - return self - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.table/", - "id": "hdmf-common.table", - "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], - "name": "hdmf-common.table", - } -) - - -class VectorData(VectorDataMixin): - """ - An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. 
VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class VectorIndex(VectorIndexMixin): - """ - Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - target: Optional[VectorData] = Field( - None, description="""Reference to the target dataset that this index applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class ElementIdentifiers(Data): - """ - A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field( - "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} - ) - - -class DynamicTableRegion(VectorData): - """ - DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) 
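The key forms accepted by `DynamicTableMixin.__getitem__` above (column name, row index, `(row, column)` cell, and row/column ranges) can be restated against a plain dict of columns. A simplified sketch with hypothetical data, mirroring `_slice_range`:

```python
from pandas import DataFrame

columns = {"volume": [1.0, 2.0, 3.0], "label": ["a", "b", "c"]}
colnames = list(columns)


def slice_range(rows, cols=None):
    # mirrors DynamicTableMixin._slice_range: subset columns, then rows
    if cols is None:
        cols = colnames
    elif isinstance(cols, str):
        cols = [cols]
    return {k: columns[k][rows] for k in cols}


print(columns["label"])                               # table["label"]: the column
print(DataFrame.from_dict(slice_range(slice(1, 2))))  # table[1]: one row
print(slice_range(0, "volume"))                       # table[0, "volume"]: one cell
print(DataFrame.from_dict(slice_range(slice(0, 2), ["label"])))  # table[0:2, "label"]
```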
- table: Optional[DynamicTable] = Field( - None, description="""Reference to the DynamicTable object that this region applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class VocabData(VectorData): - """ - Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - vocabulary: Optional[str] = Field( - None, description="""The available items in the controlled vocabulary.""" - ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class DynamicTable(DynamicTableMixin): - """ - A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - colnames: Optional[str] = Field( - None, - description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", - ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) - id: NDArray[Shape["* num_rows"], int] = Field( - ..., - description="""Array of unique identifiers for the rows of this dynamic table.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, - ) - vector_data: Optional[List[VectorData]] = Field( - None, description="""Vector columns, including index columns, of this dynamic table.""" - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -VectorData.model_rebuild() -VectorIndex.model_rebuild() -ElementIdentifiers.model_rebuild() -DynamicTableRegion.model_rebuild() -VocabData.model_rebuild() -DynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py deleted file mode 100644 index a2dcc70..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py +++ /dev/null @@ -1,86 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_3_0.hdmf_common_resources import ( - ExternalResources, - ExternalResourcesKeys, - ExternalResourcesResources, - ExternalResourcesObjects, - ExternalResourcesObjectKeys, -) -from ...hdmf_common.v1_3_0.hdmf_common_base import Data, Container, SimpleMultiContainer -from ...hdmf_common.v1_3_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData -from ...hdmf_common.v1_3_0.hdmf_common_table import ( - VectorData, - VectorIndex, - ElementIdentifiers, - DynamicTableRegion, - VocabData, - DynamicTable, -) - -metamodel_version = "None" -version = "1.3.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": True}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common/", - "description": "Common data structures provided by HDMF", - "id": "hdmf-common", - "imports": [ - "hdmf-common.base", - "hdmf-common.table", - "hdmf-common.sparse", - "hdmf-common.resources", - "hdmf-common.nwb.language", - ], - "name": "hdmf-common", - } -) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py 
b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py deleted file mode 100644 index c26f4f8..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py +++ /dev/null @@ -1,104 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np - -metamodel_version = "None" -version = "1.4.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.base/", - "id": "hdmf-common.base", - "imports": ["hdmf-common.nwb.language"], - "name": "hdmf-common.base", - } -) - - -class Data(ConfiguredBaseModel): - """ - An abstract data type for a dataset. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - name: str = Field(...) - - -class Container(ConfiguredBaseModel): - """ - An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - name: str = Field(...) - - -class SimpleMultiContainer(Container): - """ - A simple Container for holding onto multiple containers. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - children: Optional[List[Container]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} - ) - name: str = Field(...) 
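Every deleted module also repeats the same `ConfiguredBaseModel`. A self-contained sketch of what its `ConfigDict` enforces: assignments are re-validated, and unknown fields are rejected outright:

```python
from pydantic import BaseModel, ConfigDict, ValidationError


class Strict(BaseModel):
    model_config = ConfigDict(validate_assignment=True, extra="forbid")
    name: str


obj = Strict(name="ok")
try:
    obj.name = 1  # validate_assignment: re-checked on every assignment
except ValidationError as err:
    print("assignment rejected:", err.errors()[0]["type"])  # string_type

try:
    Strict(name="ok", typo_field=1)  # extra="forbid": unknown fields error
except ValidationError as err:
    print("extra field rejected:", err.errors()[0]["type"])  # extra_forbidden
```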
- - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Data.model_rebuild() -Container.model_rebuild() -SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py deleted file mode 100644 index 83e31dd..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py +++ /dev/null @@ -1,110 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_4_0.hdmf_common_base import Container -from numpydantic import NDArray, Shape - -metamodel_version = "None" -version = "1.4.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -NUMPYDANTIC_VERSION = "1.2.1" -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.sparse/", - "id": "hdmf-common.sparse", - "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], - "name": "hdmf-common.sparse", - } -) - - -class CSRMatrix(Container): - """ - A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.sparse", "tree_root": True} - ) - - name: str = Field(...) - shape: Optional[np.uint64] = Field( - None, description="""The shape (number of rows, number of columns) of this sparse matrix.""" - ) - indices: NDArray[Shape["* number_of_non_zero_values"], np.uint64] = Field( - ..., - description="""The column indices.""", - json_schema_extra={ - "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} - }, - ) - indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], np.uint64] = Field( - ..., - description="""The row index pointer.""", - json_schema_extra={ - "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} - }, - ) - data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") - - -class CSRMatrixData(ConfiguredBaseModel): - """ - The non-zero values in the matrix. 
- """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) - - name: Literal["data"] = Field( - "data", - json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -CSRMatrix.model_rebuild() -CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py deleted file mode 100644 index 20c9a63..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ /dev/null @@ -1,422 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -import numpy as np -from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container -from pandas import DataFrame -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple -from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator - -metamodel_version = "None" -version = "1.4.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -NUMPYDANTIC_VERSION = "1.2.1" - - -class VectorDataMixin(BaseModel): - """ - Mixin class to give VectorData indexing abilities - """ - - _index: Optional["VectorIndex"] = None - - # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None - - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - if self._index: - # Following hdmf, VectorIndex is the thing that knows how to do the slicing - return self._index[item] - else: - return self.array[item] - - def __setitem__(self, key, value) -> None: - if self._index: - # Following hdmf, VectorIndex is the thing that knows how to do the slicing - self._index[key] = value - else: - self.array[key] = value - - -class VectorIndexMixin(BaseModel): - """ - Mixin class to give VectorIndex indexing abilities - """ - - # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None - target: Optional["VectorData"] = None - - def _getitem_helper(self, arg: int): - """ - Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` - """ - - start = 0 if arg == 0 else self.array[arg - 1] - end = self.array[arg] - return self.target.array[slice(start, end)] - - def __getitem__(self, item: Union[int, slice]) -> Any: - if self.target is None: - return self.array[item] - elif type(self.target).__name__ == "VectorData": - if isinstance(item, int): - return 
self._getitem_helper(item) - else: - idx = range(*item.indices(len(self.array))) - return [self._getitem_helper(i) for i in idx] - else: - raise NotImplementedError("DynamicTableRange not supported yet") - - def __setitem__(self, key, value) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value - else: - self.array[key] = value - - -class DynamicTableMixin(BaseModel): - """ - Mixin to make DynamicTable subclasses behave like tables/dataframes - - Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable` - but simplifying along the way :) - """ - - model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] - NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( - "name", - "colnames", - "description", - ) - - # overridden by subclass but implemented here for testing and typechecking purposes :) - colnames: List[str] = Field(default_factory=list) - - @property - def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - - @overload - def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... - - @overload - def __getitem__(self, item: int) -> DataFrame: ... - - @overload - def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... - - @overload - def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, - list, - "NDArray", - "VectorData", - ]: ... - - @overload - def __getitem__(self, item: slice) -> DataFrame: ... - - def __getitem__( - self, - item: Union[ - str, - int, - slice, - Tuple[int, Union[int, str]], - Tuple[Union[int, slice], ...], - ], - ) -> Any: - """ - Get an item from the table - - If item is... - - - ``str`` : get the column with this name - - ``int`` : get the row at this index - - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column - - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') - gets the 0th row from ``colname`` - - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. 
- returns as a :class:`pandas.DataFrame` - """ - if isinstance(item, str): - return self._columns[item] - if isinstance(item, (int, slice)): - return DataFrame.from_dict(self._slice_range(item)) - elif isinstance(item, tuple): - if len(item) != 2: - raise ValueError( - "DynamicTables are 2-dimensional, can't index with more than 2 indices like" - f" {item}" - ) - - # all other cases are tuples of (rows, cols) - rows, cols = item - if isinstance(cols, (int, slice)): - cols = self.colnames[cols] - data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) - else: - raise ValueError(f"Unsure how to get item with key {item}") - - def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None - ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - if cols is None: - cols = self.colnames - elif isinstance(cols, str): - cols = [cols] - - data = {k: self._columns[k][rows] for k in cols} - return data - - def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") - - def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): - """ - Add a column, appending it to ``colnames`` - """ - # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): - return super().__setattr__(key, value) - - if key not in self.model_fields_set and not key.endswith("_index"): - self.colnames.append(key) - - return super().__setattr__(key, value) - - @model_validator(mode="before") - @classmethod - def create_colnames(cls, model: Dict[str, Any]): - """ - Construct colnames from arguments. - - the model dict is ordered after python3.6, so we can use that minus - anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order - """ - if "colnames" not in model: - colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end - colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS - and not k.endswith("_index") - and k not in model["colnames"].keys() - ] - model["colnames"].extend(colnames) - return model - - @model_validator(mode="after") - def resolve_targets(self) -> "DynamicTableMixin": - """ - Ensure that any implicitly indexed columns are linked, and create backlinks - """ - for key, col in self._columns.items(): - if isinstance(col, VectorData): - # find an index - idx = None - for field_name in self.model_fields_set: - # implicit name-based index - field = getattr(self, field_name) - if isinstance(field, VectorIndex): - if field_name == f"{key}_index": - idx = field - break - elif field.target is col: - idx = field - break - if idx is not None: - col._index = idx - idx.target = col - return self - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.table/", - "id": "hdmf-common.table", - "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], - "name": "hdmf-common.table", - } -) - - -class VectorData(VectorDataMixin): - """ - An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. 
VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class VectorIndex(VectorIndexMixin): - """ - Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - target: Optional[VectorData] = Field( - None, description="""Reference to the target dataset that this index applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class ElementIdentifiers(Data): - """ - A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field( - "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} - ) - - -class DynamicTableRegion(VectorData): - """ - DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) 
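[editor's note] The `_getitem_helper` in `VectorIndexMixin` above implements the ragged-array convention that the `VectorIndex` docstring spells out: row `i` of the target spans `target[index[i-1]:index[i]]`, with the start pinned to 0 for the first row. A tiny numpy-only sketch with made-up values:

```python
import numpy as np

data = np.array([1, 2, 3, 4, 5, 6])  # flat VectorData-style array
index = np.array([2, 3, 6])          # cumulative end offsets, one per table row


def row(i: int) -> np.ndarray:
    # same arithmetic as VectorIndexMixin._getitem_helper
    start = 0 if i == 0 else index[i - 1]
    return data[start:index[i]]


print(row(0))  # [1 2]
print(row(1))  # [3]
print(row(2))  # [4 5 6]
```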
- table: Optional[DynamicTable] = Field( - None, description="""Reference to the DynamicTable object that this region applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class DynamicTable(DynamicTableMixin): - """ - A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - colnames: Optional[str] = Field( - None, - description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", - ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) - id: NDArray[Shape["* num_rows"], int] = Field( - ..., - description="""Array of unique identifiers for the rows of this dynamic table.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, - ) - vector_data: Optional[List[VectorData]] = Field( - None, description="""Vector columns, including index columns, of this dynamic table.""" - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -VectorData.model_rebuild() -VectorIndex.model_rebuild() -ElementIdentifiers.model_rebuild() -DynamicTableRegion.model_rebuild() -DynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py deleted file mode 100644 index db59f28..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py +++ /dev/null @@ -1,77 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData -from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container, SimpleMultiContainer -from ...hdmf_common.v1_4_0.hdmf_common_table import ( - VectorData, - VectorIndex, - ElementIdentifiers, - DynamicTableRegion, - DynamicTable, -) - -metamodel_version = "None" -version = "1.4.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": True}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common/", - "description": "Common data structures provided by HDMF", - "id": "hdmf-common", - "imports": [ - "hdmf-common.base", - "hdmf-common.table", - "hdmf-common.sparse", - "hdmf-common.nwb.language", - ], - "name": "hdmf-common", - } -) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py index e520f5f..49f08da 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py 
@@ -71,17 +71,17 @@ class CSRMatrix(Container): ) name: str = Field(...) - shape: Optional[np.uint64] = Field( + shape: Optional[int] = Field( None, description="""The shape (number of rows, number of columns) of this sparse matrix.""" ) - indices: NDArray[Shape["* number_of_non_zero_values"], np.uint64] = Field( + indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field( ..., description="""The column indices.""", json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} }, ) - indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], np.uint64] = Field( + indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field( ..., description="""The row index pointer.""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index ba68e69..9dbabc2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -67,7 +67,7 @@ class VectorDataMixin(BaseModel): else: return self.array[item] - def __setitem__(self, key, value) -> None: + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing self._index[key] = value @@ -84,7 +84,7 @@ class VectorIndexMixin(BaseModel): array: Optional[NDArray] = None target: Optional["VectorData"] = None - def _getitem_helper(self, arg: int): + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ @@ -105,7 +105,7 @@ class VectorIndexMixin(BaseModel): else: raise NotImplementedError("DynamicTableRange not supported yet") - def __setitem__(self, key, value) -> None: + def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: # VectorIndex is the thing that knows how to do the slicing self._index[key] = value @@ -122,7 +122,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] + __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -133,15 +133,15 @@ class DynamicTableMixin(BaseModel): colnames: List[str] = Field(default_factory=list) @property - def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: + def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: return [getattr(self, k) for i, k in enumerate(self.colnames)] @overload - def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... + def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload def __getitem__(self, item: int) -> DataFrame: ... @@ -154,7 +154,7 @@ class DynamicTableMixin(BaseModel): DataFrame, list, "NDArray", - "VectorData", + "VectorDataMixin", ]: ... 
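[editor's note] The hunk above widens the column annotations from the concrete `"VectorData"` class to the shared `"VectorDataMixin"`. The columns themselves ride on pydantic's extra-field machinery: with `extra="allow"`, unknown constructor kwargs land in `__pydantic_extra__` and become attributes. A standalone sketch with hypothetical column names:

```python
from typing import Dict, List, Union

from pydantic import BaseModel, ConfigDict, Field


class TableSketch(BaseModel):
    model_config = ConfigDict(extra="allow")
    # constrain what types extra (column) values may take
    __pydantic_extra__: Dict[str, Union[list, str]]

    name: str
    colnames: List[str] = Field(default_factory=list)


t = TableSketch(name="trials", start_time=[0.0, 1.0], stop_time=[0.5, 1.5])
print(list(t.__pydantic_extra__))  # ['start_time', 'stop_time']
print(t.start_time)                # [0.0, 1.0]
```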
@overload @@ -232,7 +232,7 @@ class DynamicTableMixin(BaseModel): @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]): + def create_colnames(cls, model: Dict[str, Any]) -> None: """ Construct colnames from arguments. @@ -241,19 +241,17 @@ class DynamicTableMixin(BaseModel): """ if "colnames" not in model: colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") ] model["colnames"] = colnames else: # add any columns not explicitly given an order at the end colnames = [ k - for k in model.keys() + for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"].keys() + and k not in model["colnames"] ] model["colnames"].extend(colnames) return model @@ -270,13 +268,11 @@ class DynamicTableMixin(BaseModel): for field_name in self.model_fields_set: # implicit name-based index field = getattr(self, field_name) - if isinstance(field, VectorIndex): - if field_name == f"{key}_index": - idx = field - break - elif field.target is col: - idx = field - break + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break if idx is not None: col._index = idx idx.target = col diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py deleted file mode 100644 index ad29fbc..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py +++ /dev/null @@ -1,104 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np - -metamodel_version = "None" -version = "1.5.1" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.base/", - "id": "hdmf-common.base", - "imports": ["hdmf-common.nwb.language"], - "name": "hdmf-common.base", - } -) - - -class Data(ConfiguredBaseModel): 
- """ - An abstract data type for a dataset. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - name: str = Field(...) - - -class Container(ConfiguredBaseModel): - """ - An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - name: str = Field(...) - - -class SimpleMultiContainer(Container): - """ - A simple Container for holding onto multiple containers. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - children: Optional[List[Container]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} - ) - name: str = Field(...) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Data.model_rebuild() -Container.model_rebuild() -SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py deleted file mode 100644 index a095b6b..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py +++ /dev/null @@ -1,110 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_5_1.hdmf_common_base import Container -from numpydantic import NDArray, Shape - -metamodel_version = "None" -version = "1.5.1" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -NUMPYDANTIC_VERSION = "1.2.1" -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.sparse/", - "id": "hdmf-common.sparse", - "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], - "name": "hdmf-common.sparse", - } -) - - -class CSRMatrix(Container): - """ - A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.sparse", "tree_root": True} - ) - - name: str = Field(...) 
- shape: Optional[np.uint64] = Field( - None, description="""The shape (number of rows, number of columns) of this sparse matrix.""" - ) - indices: NDArray[Shape["* number_of_non_zero_values"], np.uint64] = Field( - ..., - description="""The column indices.""", - json_schema_extra={ - "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} - }, - ) - indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], np.uint64] = Field( - ..., - description="""The row index pointer.""", - json_schema_extra={ - "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} - }, - ) - data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") - - -class CSRMatrixData(ConfiguredBaseModel): - """ - The non-zero values in the matrix. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) - - name: Literal["data"] = Field( - "data", - json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -CSRMatrix.model_rebuild() -CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py deleted file mode 100644 index 91746ee..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ /dev/null @@ -1,453 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -import numpy as np -from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container -from pandas import DataFrame -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator -from numpydantic import NDArray, Shape - -metamodel_version = "None" -version = "1.5.1" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -NUMPYDANTIC_VERSION = "1.2.1" - - -class VectorDataMixin(BaseModel): - """ - Mixin class to give VectorData indexing abilities - """ - - _index: Optional["VectorIndex"] = None - - # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None - - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - if self._index: - # Following hdmf, VectorIndex is the thing that knows how to do the slicing - return self._index[item] - else: - return self.array[item] - - def __setitem__(self, key, value) -> None: - if self._index: - # Following hdmf, VectorIndex is the thing 
that knows how to do the slicing - self._index[key] = value - else: - self.array[key] = value - - -class VectorIndexMixin(BaseModel): - """ - Mixin class to give VectorIndex indexing abilities - """ - - # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None - target: Optional["VectorData"] = None - - def _getitem_helper(self, arg: int): - """ - Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` - """ - - start = 0 if arg == 0 else self.array[arg - 1] - end = self.array[arg] - return self.target.array[slice(start, end)] - - def __getitem__(self, item: Union[int, slice]) -> Any: - if self.target is None: - return self.array[item] - elif type(self.target).__name__ == "VectorData": - if isinstance(item, int): - return self._getitem_helper(item) - else: - idx = range(*item.indices(len(self.array))) - return [self._getitem_helper(i) for i in idx] - else: - raise NotImplementedError("DynamicTableRange not supported yet") - - def __setitem__(self, key, value) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value - else: - self.array[key] = value - - -class DynamicTableMixin(BaseModel): - """ - Mixin to make DynamicTable subclasses behave like tables/dataframes - - Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable` - but simplifying along the way :) - """ - - model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] - NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( - "name", - "colnames", - "description", - ) - - # overridden by subclass but implemented here for testing and typechecking purposes :) - colnames: List[str] = Field(default_factory=list) - - @property - def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - - @overload - def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... - - @overload - def __getitem__(self, item: int) -> DataFrame: ... - - @overload - def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... - - @overload - def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, - list, - "NDArray", - "VectorData", - ]: ... - - @overload - def __getitem__(self, item: slice) -> DataFrame: ... - - def __getitem__( - self, - item: Union[ - str, - int, - slice, - Tuple[int, Union[int, str]], - Tuple[Union[int, slice], ...], - ], - ) -> Any: - """ - Get an item from the table - - If item is... - - - ``str`` : get the column with this name - - ``int`` : get the row at this index - - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column - - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') - gets the 0th row from ``colname`` - - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. 
- returns as a :class:`pandas.DataFrame` - """ - if isinstance(item, str): - return self._columns[item] - if isinstance(item, (int, slice)): - return DataFrame.from_dict(self._slice_range(item)) - elif isinstance(item, tuple): - if len(item) != 2: - raise ValueError( - "DynamicTables are 2-dimensional, can't index with more than 2 indices like" - f" {item}" - ) - - # all other cases are tuples of (rows, cols) - rows, cols = item - if isinstance(cols, (int, slice)): - cols = self.colnames[cols] - data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) - else: - raise ValueError(f"Unsure how to get item with key {item}") - - def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None - ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - if cols is None: - cols = self.colnames - elif isinstance(cols, str): - cols = [cols] - - data = {k: self._columns[k][rows] for k in cols} - return data - - def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") - - def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): - """ - Add a column, appending it to ``colnames`` - """ - # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): - return super().__setattr__(key, value) - - if key not in self.model_fields_set and not key.endswith("_index"): - self.colnames.append(key) - - return super().__setattr__(key, value) - - @model_validator(mode="before") - @classmethod - def create_colnames(cls, model: Dict[str, Any]): - """ - Construct colnames from arguments. - - the model dict is ordered after python3.6, so we can use that minus - anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order - """ - if "colnames" not in model: - colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end - colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS - and not k.endswith("_index") - and k not in model["colnames"].keys() - ] - model["colnames"].extend(colnames) - return model - - @model_validator(mode="after") - def resolve_targets(self) -> "DynamicTableMixin": - """ - Ensure that any implicitly indexed columns are linked, and create backlinks - """ - for key, col in self._columns.items(): - if isinstance(col, VectorData): - # find an index - idx = None - for field_name in self.model_fields_set: - # implicit name-based index - field = getattr(self, field_name) - if isinstance(field, VectorIndex): - if field_name == f"{key}_index": - idx = field - break - elif field.target is col: - idx = field - break - if idx is not None: - col._index = idx - idx.target = col - return self - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.table/", - "id": "hdmf-common.table", - "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], - "name": "hdmf-common.table", - } -) - - -class VectorData(VectorDataMixin): - """ - An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. 
VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class VectorIndex(VectorIndexMixin): - """ - Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - target: Optional[VectorData] = Field( - None, description="""Reference to the target dataset that this index applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class ElementIdentifiers(Data): - """ - A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field( - "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} - ) - - -class DynamicTableRegion(VectorData): - """ - DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) 
- table: Optional[DynamicTable] = Field( - None, description="""Reference to the DynamicTable object that this region applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class DynamicTable(DynamicTableMixin): - """ - A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - colnames: Optional[str] = Field( - None, - description="""The names of the columns in this table. This should be used to specify an order to the columns.""", - ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) - id: NDArray[Shape["* num_rows"], int] = Field( - ..., - description="""Array of unique identifiers for the rows of this dynamic table.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, - ) - vector_data: Optional[List[VectorData]] = Field( - None, description="""Vector columns, including index columns, of this dynamic table.""" - ) - - -class AlignedDynamicTable(DynamicTable): - """ - DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. 
- """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - children: Optional[List[DynamicTable]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} - ) - name: str = Field(...) - colnames: Optional[str] = Field( - None, - description="""The names of the columns in this table. This should be used to specify an order to the columns.""", - ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) - id: NDArray[Shape["* num_rows"], int] = Field( - ..., - description="""Array of unique identifiers for the rows of this dynamic table.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, - ) - vector_data: Optional[List[VectorData]] = Field( - None, description="""Vector columns, including index columns, of this dynamic table.""" - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -VectorData.model_rebuild() -VectorIndex.model_rebuild() -ElementIdentifiers.model_rebuild() -DynamicTableRegion.model_rebuild() -DynamicTable.model_rebuild() -AlignedDynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py deleted file mode 100644 index 836122e..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py +++ /dev/null @@ -1,78 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_5_1.hdmf_common_sparse import CSRMatrix, CSRMatrixData -from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container, SimpleMultiContainer -from ...hdmf_common.v1_5_1.hdmf_common_table import ( - VectorData, - VectorIndex, - ElementIdentifiers, - DynamicTableRegion, - DynamicTable, - AlignedDynamicTable, -) - -metamodel_version = "None" -version = "1.5.1" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": True}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common/", - "description": "Common data structures provided by HDMF", - "id": "hdmf-common", - "imports": [ - "hdmf-common.base", - "hdmf-common.table", - "hdmf-common.sparse", - "hdmf-common.nwb.language", - ], - "name": "hdmf-common", - } -) - - -# Model rebuild -# see 
https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py deleted file mode 100644 index 499e6ad..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py +++ /dev/null @@ -1,104 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np - -metamodel_version = "None" -version = "1.6.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.base/", - "id": "hdmf-common.base", - "imports": ["hdmf-common.nwb.language"], - "name": "hdmf-common.base", - } -) - - -class Data(ConfiguredBaseModel): - """ - An abstract data type for a dataset. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - name: str = Field(...) - - -class Container(ConfiguredBaseModel): - """ - An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - name: str = Field(...) - - -class SimpleMultiContainer(Container): - """ - A simple Container for holding onto multiple containers. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - children: Optional[List[Container]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} - ) - name: str = Field(...) 
- - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Data.model_rebuild() -Container.model_rebuild() -SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py deleted file mode 100644 index 0966f74..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py +++ /dev/null @@ -1,110 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_6_0.hdmf_common_base import Container -from numpydantic import NDArray, Shape - -metamodel_version = "None" -version = "1.6.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -NUMPYDANTIC_VERSION = "1.2.1" -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.sparse/", - "id": "hdmf-common.sparse", - "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], - "name": "hdmf-common.sparse", - } -) - - -class CSRMatrix(Container): - """ - A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.sparse", "tree_root": True} - ) - - name: str = Field(...) - shape: Optional[np.uint64] = Field( - None, description="""The shape (number of rows, number of columns) of this sparse matrix.""" - ) - indices: NDArray[Shape["* number_of_non_zero_values"], np.uint64] = Field( - ..., - description="""The column indices.""", - json_schema_extra={ - "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} - }, - ) - indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], np.uint64] = Field( - ..., - description="""The row index pointer.""", - json_schema_extra={ - "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} - }, - ) - data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") - - -class CSRMatrixData(ConfiguredBaseModel): - """ - The non-zero values in the matrix. 
- """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) - - name: Literal["data"] = Field( - "data", - json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -CSRMatrix.model_rebuild() -CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py deleted file mode 100644 index e0e06bc..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ /dev/null @@ -1,453 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -import numpy as np -from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container -from pandas import DataFrame -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator -from numpydantic import NDArray, Shape - -metamodel_version = "None" -version = "1.6.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -NUMPYDANTIC_VERSION = "1.2.1" - - -class VectorDataMixin(BaseModel): - """ - Mixin class to give VectorData indexing abilities - """ - - _index: Optional["VectorIndex"] = None - - # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None - - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - if self._index: - # Following hdmf, VectorIndex is the thing that knows how to do the slicing - return self._index[item] - else: - return self.array[item] - - def __setitem__(self, key, value) -> None: - if self._index: - # Following hdmf, VectorIndex is the thing that knows how to do the slicing - self._index[key] = value - else: - self.array[key] = value - - -class VectorIndexMixin(BaseModel): - """ - Mixin class to give VectorIndex indexing abilities - """ - - # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None - target: Optional["VectorData"] = None - - def _getitem_helper(self, arg: int): - """ - Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` - """ - - start = 0 if arg == 0 else self.array[arg - 1] - end = self.array[arg] - return self.target.array[slice(start, end)] - - def __getitem__(self, item: Union[int, slice]) -> Any: - if self.target is None: - return self.array[item] - elif type(self.target).__name__ == "VectorData": - if isinstance(item, int): - return 
self._getitem_helper(item) - else: - idx = range(*item.indices(len(self.array))) - return [self._getitem_helper(i) for i in idx] - else: - raise NotImplementedError("DynamicTableRange not supported yet") - - def __setitem__(self, key, value) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value - else: - self.array[key] = value - - -class DynamicTableMixin(BaseModel): - """ - Mixin to make DynamicTable subclasses behave like tables/dataframes - - Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable` - but simplifying along the way :) - """ - - model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] - NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( - "name", - "colnames", - "description", - ) - - # overridden by subclass but implemented here for testing and typechecking purposes :) - colnames: List[str] = Field(default_factory=list) - - @property - def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - - @overload - def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... - - @overload - def __getitem__(self, item: int) -> DataFrame: ... - - @overload - def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... - - @overload - def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, - list, - "NDArray", - "VectorData", - ]: ... - - @overload - def __getitem__(self, item: slice) -> DataFrame: ... - - def __getitem__( - self, - item: Union[ - str, - int, - slice, - Tuple[int, Union[int, str]], - Tuple[Union[int, slice], ...], - ], - ) -> Any: - """ - Get an item from the table - - If item is... - - - ``str`` : get the column with this name - - ``int`` : get the row at this index - - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column - - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') - gets the 0th row from ``colname`` - - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. 
- returns as a :class:`pandas.DataFrame` - """ - if isinstance(item, str): - return self._columns[item] - if isinstance(item, (int, slice)): - return DataFrame.from_dict(self._slice_range(item)) - elif isinstance(item, tuple): - if len(item) != 2: - raise ValueError( - "DynamicTables are 2-dimensional, can't index with more than 2 indices like" - f" {item}" - ) - - # all other cases are tuples of (rows, cols) - rows, cols = item - if isinstance(cols, (int, slice)): - cols = self.colnames[cols] - data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) - else: - raise ValueError(f"Unsure how to get item with key {item}") - - def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None - ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - if cols is None: - cols = self.colnames - elif isinstance(cols, str): - cols = [cols] - - data = {k: self._columns[k][rows] for k in cols} - return data - - def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") - - def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): - """ - Add a column, appending it to ``colnames`` - """ - # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): - return super().__setattr__(key, value) - - if key not in self.model_fields_set and not key.endswith("_index"): - self.colnames.append(key) - - return super().__setattr__(key, value) - - @model_validator(mode="before") - @classmethod - def create_colnames(cls, model: Dict[str, Any]): - """ - Construct colnames from arguments. - - the model dict is ordered after python3.6, so we can use that minus - anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order - """ - if "colnames" not in model: - colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end - colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS - and not k.endswith("_index") - and k not in model["colnames"].keys() - ] - model["colnames"].extend(colnames) - return model - - @model_validator(mode="after") - def resolve_targets(self) -> "DynamicTableMixin": - """ - Ensure that any implicitly indexed columns are linked, and create backlinks - """ - for key, col in self._columns.items(): - if isinstance(col, VectorData): - # find an index - idx = None - for field_name in self.model_fields_set: - # implicit name-based index - field = getattr(self, field_name) - if isinstance(field, VectorIndex): - if field_name == f"{key}_index": - idx = field - break - elif field.target is col: - idx = field - break - if idx is not None: - col._index = idx - idx.target = col - return self - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.table/", - "id": "hdmf-common.table", - "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], - "name": "hdmf-common.table", - } -) - - -class VectorData(VectorDataMixin): - """ - An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. 
VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class VectorIndex(VectorIndexMixin): - """ - Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - target: Optional[VectorData] = Field( - None, description="""Reference to the target dataset that this index applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class ElementIdentifiers(Data): - """ - A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field( - "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} - ) - - -class DynamicTableRegion(VectorData): - """ - DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) 
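A minimal standalone sketch of the ragged-array convention the `VectorIndex` docstring describes and `VectorIndexMixin._getitem_helper` implements: the index array holds cumulative end offsets into the flat data, and `resolve_targets` pairs a column named `x` with an index named `x_index`. Plain numpy stand-ins, not the generated API:

```python
import numpy as np

data = np.array([1, 2, 3, 4, 5, 6])  # flat VectorData.array
index = np.array([2, 3, 6])          # VectorIndex.array: cumulative end offsets

def row(i: int) -> np.ndarray:
    # mirrors VectorIndexMixin._getitem_helper: row i spans
    # data[index[i-1]:index[i]], with row 0 starting at offset 0
    start = 0 if i == 0 else index[i - 1]
    return data[start:index[i]]

assert row(0).tolist() == [1, 2]
assert row(1).tolist() == [3]
assert row(2).tolist() == [4, 5, 6]
```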
- table: Optional[DynamicTable] = Field( - None, description="""Reference to the DynamicTable object that this region applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class DynamicTable(DynamicTableMixin): - """ - A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - colnames: Optional[str] = Field( - None, - description="""The names of the columns in this table. This should be used to specify an order to the columns.""", - ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) - id: NDArray[Shape["* num_rows"], int] = Field( - ..., - description="""Array of unique identifiers for the rows of this dynamic table.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, - ) - vector_data: Optional[List[VectorData]] = Field( - None, description="""Vector columns, including index columns, of this dynamic table.""" - ) - - -class AlignedDynamicTable(DynamicTable): - """ - DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. 
- """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - children: Optional[List[DynamicTable]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} - ) - name: str = Field(...) - colnames: Optional[str] = Field( - None, - description="""The names of the columns in this table. This should be used to specify an order to the columns.""", - ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) - id: NDArray[Shape["* num_rows"], int] = Field( - ..., - description="""Array of unique identifiers for the rows of this dynamic table.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, - ) - vector_data: Optional[List[VectorData]] = Field( - None, description="""Vector columns, including index columns, of this dynamic table.""" - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -VectorData.model_rebuild() -VectorIndex.model_rebuild() -ElementIdentifiers.model_rebuild() -DynamicTableRegion.model_rebuild() -DynamicTable.model_rebuild() -AlignedDynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py deleted file mode 100644 index 1dc832f..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py +++ /dev/null @@ -1,78 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_6_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData -from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container, SimpleMultiContainer -from ...hdmf_common.v1_6_0.hdmf_common_table import ( - VectorData, - VectorIndex, - ElementIdentifiers, - DynamicTableRegion, - DynamicTable, - AlignedDynamicTable, -) - -metamodel_version = "None" -version = "1.6.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": True}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common/", - "description": "Common data structures provided by HDMF", - "id": "hdmf-common", - "imports": [ - "hdmf-common.base", - "hdmf-common.table", - "hdmf-common.sparse", - "hdmf-common.nwb.language", - ], - "name": "hdmf-common", - } -) - - -# Model rebuild -# see 
https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py deleted file mode 100644 index 751693c..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py +++ /dev/null @@ -1,104 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np - -metamodel_version = "None" -version = "1.7.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.base/", - "id": "hdmf-common.base", - "imports": ["hdmf-common.nwb.language"], - "name": "hdmf-common.base", - } -) - - -class Data(ConfiguredBaseModel): - """ - An abstract data type for a dataset. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - name: str = Field(...) - - -class Container(ConfiguredBaseModel): - """ - An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - name: str = Field(...) - - -class SimpleMultiContainer(Container): - """ - A simple Container for holding onto multiple containers. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.base", "tree_root": True} - ) - - children: Optional[List[Container]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} - ) - name: str = Field(...) 
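A standalone numpy sketch of the CSR layout that the `CSRMatrix` class in the deleted sparse modules describes: per its docstring, values for row `i` live at `data[indptr[i]:indptr[i+1]]`, with matching column positions in `indices`. The array contents here are illustrative:

```python
import numpy as np

indptr = np.array([0, 2, 3, 6])          # row index pointer, length n_rows + 1
indices = np.array([0, 2, 2, 0, 1, 2])   # column index of each non-zero value
data = np.array([1, 2, 3, 4, 5, 6])      # the non-zero values

def row_entries(i: int) -> list:
    # (column, value) pairs for row i, per the CSRMatrix docstring
    sl = slice(indptr[i], indptr[i + 1])
    return list(zip(indices[sl].tolist(), data[sl].tolist()))

assert row_entries(0) == [(0, 1), (2, 2)]
assert row_entries(2) == [(0, 4), (1, 5), (2, 6)]
```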
- - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Data.model_rebuild() -Container.model_rebuild() -SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py deleted file mode 100644 index c6bfab5..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py +++ /dev/null @@ -1,110 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_7_0.hdmf_common_base import Container -from numpydantic import NDArray, Shape - -metamodel_version = "None" -version = "1.7.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -NUMPYDANTIC_VERSION = "1.2.1" -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.sparse/", - "id": "hdmf-common.sparse", - "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], - "name": "hdmf-common.sparse", - } -) - - -class CSRMatrix(Container): - """ - A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.sparse", "tree_root": True} - ) - - name: str = Field(...) - shape: Optional[np.uint64] = Field( - None, description="""The shape (number of rows, number of columns) of this sparse matrix.""" - ) - indices: NDArray[Shape["* number_of_non_zero_values"], np.uint64] = Field( - ..., - description="""The column indices.""", - json_schema_extra={ - "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} - }, - ) - indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], np.uint64] = Field( - ..., - description="""The row index pointer.""", - json_schema_extra={ - "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} - }, - ) - data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") - - -class CSRMatrixData(ConfiguredBaseModel): - """ - The non-zero values in the matrix. 
- """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) - - name: Literal["data"] = Field( - "data", - json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -CSRMatrix.model_rebuild() -CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py deleted file mode 100644 index 65a9dd3..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ /dev/null @@ -1,453 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -import numpy as np -from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container -from pandas import DataFrame -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator -from numpydantic import NDArray, Shape - -metamodel_version = "None" -version = "1.7.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -NUMPYDANTIC_VERSION = "1.2.1" - - -class VectorDataMixin(BaseModel): - """ - Mixin class to give VectorData indexing abilities - """ - - _index: Optional["VectorIndex"] = None - - # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None - - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - if self._index: - # Following hdmf, VectorIndex is the thing that knows how to do the slicing - return self._index[item] - else: - return self.array[item] - - def __setitem__(self, key, value) -> None: - if self._index: - # Following hdmf, VectorIndex is the thing that knows how to do the slicing - self._index[key] = value - else: - self.array[key] = value - - -class VectorIndexMixin(BaseModel): - """ - Mixin class to give VectorIndex indexing abilities - """ - - # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None - target: Optional["VectorData"] = None - - def _getitem_helper(self, arg: int): - """ - Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` - """ - - start = 0 if arg == 0 else self.array[arg - 1] - end = self.array[arg] - return self.target.array[slice(start, end)] - - def __getitem__(self, item: Union[int, slice]) -> Any: - if self.target is None: - return self.array[item] - elif type(self.target).__name__ == "VectorData": - if isinstance(item, int): - return 
self._getitem_helper(item) - else: - idx = range(*item.indices(len(self.array))) - return [self._getitem_helper(i) for i in idx] - else: - raise NotImplementedError("DynamicTableRange not supported yet") - - def __setitem__(self, key, value) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value - else: - self.array[key] = value - - -class DynamicTableMixin(BaseModel): - """ - Mixin to make DynamicTable subclasses behave like tables/dataframes - - Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable` - but simplifying along the way :) - """ - - model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] - NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( - "name", - "colnames", - "description", - ) - - # overridden by subclass but implemented here for testing and typechecking purposes :) - colnames: List[str] = Field(default_factory=list) - - @property - def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - - @overload - def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... - - @overload - def __getitem__(self, item: int) -> DataFrame: ... - - @overload - def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... - - @overload - def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, - list, - "NDArray", - "VectorData", - ]: ... - - @overload - def __getitem__(self, item: slice) -> DataFrame: ... - - def __getitem__( - self, - item: Union[ - str, - int, - slice, - Tuple[int, Union[int, str]], - Tuple[Union[int, slice], ...], - ], - ) -> Any: - """ - Get an item from the table - - If item is... - - - ``str`` : get the column with this name - - ``int`` : get the row at this index - - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column - - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') - gets the 0th row from ``colname`` - - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. 
- returns as a :class:`pandas.DataFrame` - """ - if isinstance(item, str): - return self._columns[item] - if isinstance(item, (int, slice)): - return DataFrame.from_dict(self._slice_range(item)) - elif isinstance(item, tuple): - if len(item) != 2: - raise ValueError( - "DynamicTables are 2-dimensional, can't index with more than 2 indices like" - f" {item}" - ) - - # all other cases are tuples of (rows, cols) - rows, cols = item - if isinstance(cols, (int, slice)): - cols = self.colnames[cols] - data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) - else: - raise ValueError(f"Unsure how to get item with key {item}") - - def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None - ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: - if cols is None: - cols = self.colnames - elif isinstance(cols, str): - cols = [cols] - - data = {k: self._columns[k][rows] for k in cols} - return data - - def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") - - def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): - """ - Add a column, appending it to ``colnames`` - """ - # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): - return super().__setattr__(key, value) - - if key not in self.model_fields_set and not key.endswith("_index"): - self.colnames.append(key) - - return super().__setattr__(key, value) - - @model_validator(mode="before") - @classmethod - def create_colnames(cls, model: Dict[str, Any]): - """ - Construct colnames from arguments. - - the model dict is ordered after python3.6, so we can use that minus - anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order - """ - if "colnames" not in model: - colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end - colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS - and not k.endswith("_index") - and k not in model["colnames"].keys() - ] - model["colnames"].extend(colnames) - return model - - @model_validator(mode="after") - def resolve_targets(self) -> "DynamicTableMixin": - """ - Ensure that any implicitly indexed columns are linked, and create backlinks - """ - for key, col in self._columns.items(): - if isinstance(col, VectorData): - # find an index - idx = None - for field_name in self.model_fields_set: - # implicit name-based index - field = getattr(self, field_name) - if isinstance(field, VectorIndex): - if field_name == f"{key}_index": - idx = field - break - elif field.target is col: - idx = field - break - if idx is not None: - col._index = idx - idx.target = col - return self - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common.table/", - "id": "hdmf-common.table", - "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], - "name": "hdmf-common.table", - } -) - - -class VectorData(VectorDataMixin): - """ - An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. 
VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class VectorIndex(VectorIndexMixin): - """ - Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - target: Optional[VectorData] = Field( - None, description="""Reference to the target dataset that this index applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class ElementIdentifiers(Data): - """ - A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field( - "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} - ) - - -class DynamicTableRegion(VectorData): - """ - DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) 
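The `__getitem__` overloads above dispatch on the key type and funnel row/column selections through `_slice_range`. A self-contained mimic of that slicing behavior, using plain dicts and lists as stand-ins for the generated columns:

```python
import pandas as pd

columns = {"a": [1, 2, 3], "b": ["x", "y", "z"]}  # stand-ins for VectorData columns
colnames = ["a", "b"]

def slice_range(rows, cols=None):
    # mirrors DynamicTableMixin._slice_range: default to all columns,
    # normalize a single column name to a list, then slice each column
    cols = colnames if cols is None else ([cols] if isinstance(cols, str) else cols)
    return {k: columns[k][rows] for k in cols}

# table[0:2] -> rows 0-1 of every column, wrapped in a DataFrame
df = pd.DataFrame.from_dict(slice_range(slice(0, 2)))
assert list(df["b"]) == ["x", "y"]
# table[0:2, "b"] -> the same rows restricted to one column
assert slice_range(slice(0, 2), "b") == {"b": ["x", "y"]}
```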
- table: Optional[DynamicTable] = Field( - None, description="""Reference to the DynamicTable object that this region applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -class DynamicTable(DynamicTableMixin): - """ - A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - name: str = Field(...) - colnames: Optional[str] = Field( - None, - description="""The names of the columns in this table. This should be used to specify an order to the columns.""", - ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) - id: NDArray[Shape["* num_rows"], int] = Field( - ..., - description="""Array of unique identifiers for the rows of this dynamic table.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, - ) - vector_data: Optional[List[VectorData]] = Field( - None, description="""Vector columns, including index columns, of this dynamic table.""" - ) - - -class AlignedDynamicTable(DynamicTable): - """ - DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. 
- """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-common.table", "tree_root": True} - ) - - children: Optional[List[DynamicTable]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} - ) - name: str = Field(...) - colnames: Optional[str] = Field( - None, - description="""The names of the columns in this table. This should be used to specify an order to the columns.""", - ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) - id: NDArray[Shape["* num_rows"], int] = Field( - ..., - description="""Array of unique identifiers for the rows of this dynamic table.""", - json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, - ) - vector_data: Optional[List[VectorData]] = Field( - None, description="""Vector columns, including index columns, of this dynamic table.""" - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -VectorData.model_rebuild() -VectorIndex.model_rebuild() -ElementIdentifiers.model_rebuild() -DynamicTableRegion.model_rebuild() -DynamicTable.model_rebuild() -AlignedDynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py deleted file mode 100644 index 7d70e39..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py +++ /dev/null @@ -1,78 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_7_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData -from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container, SimpleMultiContainer -from ...hdmf_common.v1_7_0.hdmf_common_table import ( - VectorData, - VectorIndex, - ElementIdentifiers, - DynamicTableRegion, - DynamicTable, - AlignedDynamicTable, -) - -metamodel_version = "None" -version = "1.7.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": True}, - "namespace": {"tag": "namespace", "value": "hdmf-common"}, - }, - "default_prefix": "hdmf-common/", - "description": "Common data structures provided by HDMF", - "id": "hdmf-common", - "imports": [ - "hdmf-common.base", - "hdmf-common.table", - "hdmf-common.sparse", - "hdmf-common.nwb.language", - ], - "name": "hdmf-common", - } -) - - -# Model rebuild -# see 
https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py index 230460c..56fad93 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py @@ -71,17 +71,17 @@ class CSRMatrix(Container): ) name: str = Field(...) - shape: Optional[np.uint64] = Field( + shape: Optional[int] = Field( None, description="""The shape (number of rows, number of columns) of this sparse matrix.""" ) - indices: NDArray[Shape["* number_of_non_zero_values"], np.uint64] = Field( + indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field( ..., description="""The column indices.""", json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} }, ) - indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], np.uint64] = Field( + indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field( ..., description="""The row index pointer.""", json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index 986c628..e9ce91d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -67,7 +67,7 @@ class VectorDataMixin(BaseModel): else: return self.array[item] - def __setitem__(self, key, value) -> None: + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing self._index[key] = value @@ -84,7 +84,7 @@ class VectorIndexMixin(BaseModel): array: Optional[NDArray] = None target: Optional["VectorData"] = None - def _getitem_helper(self, arg: int): + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ @@ -105,7 +105,7 @@ class VectorIndexMixin(BaseModel): else: raise NotImplementedError("DynamicTableRange not supported yet") - def __setitem__(self, key, value) -> None: + def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: # VectorIndex is the thing that knows how to do the slicing self._index[key] = value @@ -122,7 +122,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorData"]] + __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -133,15 +133,15 @@ class DynamicTableMixin(BaseModel): colnames: List[str] = Field(default_factory=list) @property - def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorData"]]: + def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: return [getattr(self, k) for i, k in enumerate(self.colnames)] @overload - def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorData"]: ... 
+ def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload def __getitem__(self, item: int) -> DataFrame: ... @@ -154,7 +154,7 @@ class DynamicTableMixin(BaseModel): DataFrame, list, "NDArray", - "VectorData", + "VectorDataMixin", ]: ... @overload @@ -232,7 +232,7 @@ class DynamicTableMixin(BaseModel): @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]): + def create_colnames(cls, model: Dict[str, Any]) -> None: """ Construct colnames from arguments. @@ -241,19 +241,17 @@ class DynamicTableMixin(BaseModel): """ if "colnames" not in model: colnames = [ - k - for k in model.keys() - if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") ] model["colnames"] = colnames else: # add any columns not explicitly given an order at the end colnames = [ k - for k in model.keys() + for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"].keys() + and k not in model["colnames"] ] model["colnames"].extend(colnames) return model @@ -270,13 +268,11 @@ class DynamicTableMixin(BaseModel): for field_name in self.model_fields_set: # implicit name-based index field = getattr(self, field_name) - if isinstance(field, VectorIndex): - if field_name == f"{key}_index": - idx = field - break - elif field.target is col: - idx = field - break + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break if idx is not None: col._index = idx idx.target = col diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py index 0ca8353..065f135 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py @@ -7,7 +7,7 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...hdmf_common.v1_4_0.hdmf_common_table import VectorData +from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData from numpydantic import NDArray, Shape metamodel_version = "None" @@ -55,7 +55,7 @@ linkml_meta = LinkMLMeta( }, "default_prefix": "hdmf-experimental.experimental/", "id": "hdmf-experimental.experimental", - "imports": ["../../hdmf_common/v1_4_0/namespace", "hdmf-experimental.nwb.language"], + "imports": ["../../hdmf_common/v1_5_0/namespace", "hdmf-experimental.nwb.language"], "name": "hdmf-experimental.experimental", } ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py index affdb23..366822a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py @@ -7,7 +7,7 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...hdmf_common.v1_4_0.hdmf_common_base import Container, Data 
+from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data metamodel_version = "None" version = "0.1.0" @@ -53,7 +53,7 @@ linkml_meta = LinkMLMeta( }, "default_prefix": "hdmf-experimental.resources/", "id": "hdmf-experimental.resources", - "imports": ["../../hdmf_common/v1_4_0/namespace", "hdmf-experimental.nwb.language"], + "imports": ["../../hdmf_common/v1_5_0/namespace", "hdmf-experimental.nwb.language"], "name": "hdmf-experimental.resources", } ) @@ -118,8 +118,8 @@ class ExternalResourcesEntities(Data): "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"} }, ) - keys_idx: np.uint64 = Field(..., description="""The index to the key in the 'keys' table.""") - resources_idx: np.uint64 = Field(..., description="""The index into the 'resources' table""") + keys_idx: int = Field(..., description="""The index to the key in the 'keys' table.""") + resources_idx: int = Field(..., description="""The index into the 'resources' table""") entity_id: str = Field(..., description="""The unique identifier entity.""") entity_uri: str = Field( ..., @@ -179,10 +179,10 @@ class ExternalResourcesObjectKeys(Data): "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} }, ) - objects_idx: np.uint64 = Field( + objects_idx: int = Field( ..., description="""The index to the 'objects' table for the object that holds the key.""" ) - keys_idx: np.uint64 = Field(..., description="""The index to the 'keys' table for the key.""") + keys_idx: int = Field(..., description="""The index to the 'keys' table for the key.""") # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py index 7ea10f7..69ffad1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py @@ -15,14 +15,15 @@ from ...hdmf_experimental.v0_1_0.hdmf_experimental_resources import ( ExternalResourcesObjects, ExternalResourcesObjectKeys, ) -from ...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData -from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container, SimpleMultiContainer -from ...hdmf_common.v1_4_0.hdmf_common_table import ( +from ...hdmf_common.v1_5_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_5_0.hdmf_common_table import ( VectorData, VectorIndex, ElementIdentifiers, DynamicTableRegion, DynamicTable, + AlignedDynamicTable, ) from ...hdmf_experimental.v0_1_0.hdmf_experimental_experimental import EnumData diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py deleted file mode 100644 index 7fef3e3..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py +++ /dev/null @@ -1,93 +0,0 @@ -from __future__ import annotations -from datetime import 
datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_5_1.hdmf_common_table import VectorData -from numpydantic import NDArray, Shape - -metamodel_version = "None" -version = "0.2.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -NUMPYDANTIC_VERSION = "1.2.1" -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, - }, - "default_prefix": "hdmf-experimental.experimental/", - "id": "hdmf-experimental.experimental", - "imports": ["../../hdmf_common/v1_5_1/namespace", "hdmf-experimental.nwb.language"], - "name": "hdmf-experimental.experimental", - } -) - - -class EnumData(VectorData): - """ - Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-experimental.experimental", "tree_root": True} - ) - - name: str = Field(...) 
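A numpy sketch of the lookup the `EnumData` docstring describes: a stored value of `i` is an index into the `elements` `VectorData`, so decoding is plain fancy indexing. Array contents are illustrative:

```python
import numpy as np

elements = np.array(["low", "medium", "high"])  # the referenced 'elements' VectorData
codes = np.array([0, 2, 2, 1])                  # EnumData values: indices into elements
assert elements[codes].tolist() == ["low", "high", "high", "medium"]
```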
-    elements: Optional[VectorData] = Field(
-        None,
-        description="""Reference to the VectorData object that contains the enumerable elements""",
-    )
-    description: Optional[str] = Field(
-        None, description="""Description of what these vectors represent."""
-    )
-    array: Optional[
-        Union[
-            NDArray[Shape["* dim0"], Any],
-            NDArray[Shape["* dim0, * dim1"], Any],
-            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
-            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
-        ]
-    ] = Field(None)
-
-
-# Model rebuild
-# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
-EnumData.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py
deleted file mode 100644
index 7606660..0000000
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py
+++ /dev/null
@@ -1,199 +0,0 @@
-from __future__ import annotations
-from datetime import datetime, date
-from decimal import Decimal
-from enum import Enum
-import re
-import sys
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
-import numpy as np
-from ...hdmf_common.v1_5_1.hdmf_common_base import Container, Data
-
-metamodel_version = "None"
-version = "0.2.0"
-
-
-class ConfiguredBaseModel(BaseModel):
-    model_config = ConfigDict(
-        validate_assignment=True,
-        validate_default=True,
-        extra="forbid",
-        arbitrary_types_allowed=True,
-        use_enum_values=True,
-        strict=False,
-    )
-    hdf5_path: Optional[str] = Field(
-        None, description="The absolute path that this object is stored in an NWB file"
-    )
-    object_id: Optional[str] = Field(None, description="Unique UUID for each object")
-
-
-class LinkMLMeta(RootModel):
-    root: Dict[str, Any] = {}
-    model_config = ConfigDict(frozen=True)
-
-    def __getattr__(self, key: str):
-        return getattr(self.root, key)
-
-    def __getitem__(self, key: str):
-        return self.root[key]
-
-    def __setitem__(self, key: str, value):
-        self.root[key] = value
-
-    def __contains__(self, key: str) -> bool:
-        return key in self.root
-
-
-linkml_meta = LinkMLMeta(
-    {
-        "annotations": {
-            "is_namespace": {"tag": "is_namespace", "value": False},
-            "namespace": {"tag": "namespace", "value": "hdmf-experimental"},
-        },
-        "default_prefix": "hdmf-experimental.resources/",
-        "id": "hdmf-experimental.resources",
-        "imports": ["../../hdmf_common/v1_5_1/namespace", "hdmf-experimental.nwb.language"],
-        "name": "hdmf-experimental.resources",
-    }
-)
-
-
-class ExternalResources(Container):
-    """
-    A set of four tables for tracking external resource references in a file. NOTE: this data type is in beta testing and is subject to change in a later version.
-    """
-
-    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
-        {"from_schema": "hdmf-experimental.resources", "tree_root": True}
-    )
-
-    name: str = Field(...)
-    keys: ExternalResourcesKeys = Field(
-        ...,
-        description="""A table for storing user terms that are used to refer to external resources.""",
-    )
-    entities: ExternalResourcesEntities = Field(
-        ..., description="""A table for mapping user terms (i.e., keys) to resource entities."""
-    )
-    resources: ExternalResourcesResources = Field(
-        ..., description="""A table for mapping user terms (i.e., keys) to resource entities."""
-    )
-    objects: ExternalResourcesObjects = Field(
-        ...,
-        description="""A table for identifying which objects in a file contain references to external resources.""",
-    )
-    object_keys: ExternalResourcesObjectKeys = Field(
-        ..., description="""A table for identifying which objects use which keys."""
-    )
-
-
-class ExternalResourcesKeys(Data):
-    """
-    A table for storing user terms that are used to refer to external resources.
-    """
-
-    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
-
-    name: Literal["keys"] = Field(
-        "keys",
-        json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}},
-    )
-    key: str = Field(
-        ...,
-        description="""The user term that maps to one or more resources in the 'resources' table.""",
-    )
-
-
-class ExternalResourcesEntities(Data):
-    """
-    A table for mapping user terms (i.e., keys) to resource entities.
-    """
-
-    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
-
-    name: Literal["entities"] = Field(
-        "entities",
-        json_schema_extra={
-            "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"}
-        },
-    )
-    keys_idx: np.uint64 = Field(..., description="""The index to the key in the 'keys' table.""")
-    resources_idx: np.uint64 = Field(..., description="""The index into the 'resources' table""")
-    entity_id: str = Field(..., description="""The unique identifier entity.""")
-    entity_uri: str = Field(
-        ...,
-        description="""The URI for the entity this reference applies to. This can be an empty string.""",
-    )
-
-
-class ExternalResourcesResources(Data):
-    """
-    A table for mapping user terms (i.e., keys) to resource entities.
-    """
-
-    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
-
-    name: Literal["resources"] = Field(
-        "resources",
-        json_schema_extra={
-            "linkml_meta": {"equals_string": "resources", "ifabsent": "string(resources)"}
-        },
-    )
-    resource: str = Field(..., description="""The name of the resource.""")
-    resource_uri: str = Field(
-        ..., description="""The URI for the resource. This can be an empty string."""
-    )
-
-
-class ExternalResourcesObjects(Data):
-    """
-    A table for identifying which objects in a file contain references to external resources.
-    """
-
-    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
-
-    name: Literal["objects"] = Field(
-        "objects",
-        json_schema_extra={
-            "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"}
-        },
-    )
-    object_id: str = Field(..., description="""The UUID for the object.""")
-    relative_path: str = Field(
-        ...,
-        description="""The relative path from the container with the object_id to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the container is a dataset which contains the value(s) that is associated with an external resource.""",
-    )
-    field: str = Field(
-        ...,
-        description="""The field of the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""",
-    )
-
-
-class ExternalResourcesObjectKeys(Data):
-    """
-    A table for identifying which objects use which keys.
-    """
-
-    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
-
-    name: Literal["object_keys"] = Field(
-        "object_keys",
-        json_schema_extra={
-            "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"}
-        },
-    )
-    objects_idx: np.uint64 = Field(
-        ..., description="""The index to the 'objects' table for the object that holds the key."""
-    )
-    keys_idx: np.uint64 = Field(..., description="""The index to the 'keys' table for the key.""")
-
-
-# Model rebuild
-# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
-ExternalResources.model_rebuild()
-ExternalResourcesKeys.model_rebuild()
-ExternalResourcesEntities.model_rebuild()
-ExternalResourcesResources.model_rebuild()
-ExternalResourcesObjects.model_rebuild()
-ExternalResourcesObjectKeys.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py
deleted file mode 100644
index 1345536..0000000
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py
+++ /dev/null
@@ -1,89 +0,0 @@
-from __future__ import annotations
-from datetime import datetime, date
-from decimal import Decimal
-from enum import Enum
-import re
-import sys
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
-import numpy as np
-from ...hdmf_experimental.v0_2_0.hdmf_experimental_resources import (
-    ExternalResources,
-    ExternalResourcesKeys,
-    ExternalResourcesEntities,
-    ExternalResourcesResources,
-    ExternalResourcesObjects,
-    ExternalResourcesObjectKeys,
-)
-from ...hdmf_common.v1_5_1.hdmf_common_sparse import CSRMatrix, CSRMatrixData
-from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container, SimpleMultiContainer
-from ...hdmf_common.v1_5_1.hdmf_common_table import (
-    VectorData,
-    VectorIndex,
-    ElementIdentifiers,
-    DynamicTableRegion,
-    DynamicTable,
-    AlignedDynamicTable,
-)
-from ...hdmf_experimental.v0_2_0.hdmf_experimental_experimental import EnumData
-
-metamodel_version = "None"
-version = "0.2.0"
-
-
-class ConfiguredBaseModel(BaseModel):
-    model_config = ConfigDict(
-        validate_assignment=True,
-        validate_default=True,
-        extra="forbid",
-        arbitrary_types_allowed=True,
-        use_enum_values=True,
-        strict=False,
-    )
-    hdf5_path: Optional[str] = Field(
-        None, description="The absolute path that this object is stored in an NWB file"
-    )
-    object_id: Optional[str] = Field(None, description="Unique UUID for each object")
-
-
-class LinkMLMeta(RootModel):
-    root: Dict[str, Any] = {}
-    model_config = ConfigDict(frozen=True)
-
-    def __getattr__(self, key: str):
-        return getattr(self.root, key)
-
-    def __getitem__(self, key: str):
-        return self.root[key]
-
-    def __setitem__(self, key: str, value):
-        self.root[key] = value
-
-    def __contains__(self, key: str) -> bool:
-        return key in self.root
-
-
-linkml_meta = LinkMLMeta(
-    {
-        "annotations": {
-            "is_namespace": {"tag": "is_namespace", "value": True},
-            "namespace": {"tag": "namespace", "value": "hdmf-experimental"},
-        },
-        "default_prefix": "hdmf-experimental/",
-        "description": (
-            "Experimental data structures provided by HDMF. These are not "
-            "guaranteed to be available in the future."
-        ),
-        "id": "hdmf-experimental",
-        "imports": [
-            "hdmf-experimental.experimental",
-            "hdmf-experimental.resources",
-            "hdmf-experimental.nwb.language",
-        ],
-        "name": "hdmf-experimental",
-    }
-)
-
-
-# Model rebuild
-# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py
deleted file mode 100644
index 8b13789..0000000
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py
deleted file mode 100644
index f57439f..0000000
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py
+++ /dev/null
@@ -1,93 +0,0 @@
-from __future__ import annotations
-from datetime import datetime, date
-from decimal import Decimal
-from enum import Enum
-import re
-import sys
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union
-from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator
-import numpy as np
-from ...hdmf_common.v1_6_0.hdmf_common_table import VectorData
-from numpydantic import NDArray, Shape
-
-metamodel_version = "None"
-version = "0.3.0"
-
-
-class ConfiguredBaseModel(BaseModel):
-    model_config = ConfigDict(
-        validate_assignment=True,
-        validate_default=True,
-        extra="forbid",
-        arbitrary_types_allowed=True,
-        use_enum_values=True,
-        strict=False,
-    )
-    hdf5_path: Optional[str] = Field(
-        None, description="The absolute path that this object is stored in an NWB file"
-    )
-    object_id: Optional[str] = Field(None, description="Unique UUID for each object")
-
-
-class LinkMLMeta(RootModel):
-    root: Dict[str, Any] = {}
-    model_config = ConfigDict(frozen=True)
-
-    def __getattr__(self, key: str):
-        return getattr(self.root, key)
-
-    def __getitem__(self, key: str):
-        return self.root[key]
-
-    def __setitem__(self, key: str, value):
-        self.root[key] = value
-
-    def __contains__(self, key: str) -> bool:
-        return key in self.root
-
-
-NUMPYDANTIC_VERSION = "1.2.1"
-linkml_meta = LinkMLMeta(
-    {
-        "annotations": {
-            "is_namespace": {"tag": "is_namespace", "value": False},
-            "namespace": {"tag": "namespace", "value": "hdmf-experimental"},
-        },
-        "default_prefix": "hdmf-experimental.experimental/",
-        "id": "hdmf-experimental.experimental",
-        "imports": ["../../hdmf_common/v1_6_0/namespace", "hdmf-experimental.nwb.language"],
-        "name": "hdmf-experimental.experimental",
-    }
-)
-
-
-class EnumData(VectorData):
-    """
-    Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute.
-    """
-
-    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
-        {"from_schema": "hdmf-experimental.experimental", "tree_root": True}
-    )
-
-    name: str = Field(...)
- elements: Optional[VectorData] = Field( - None, - description="""Reference to the VectorData object that contains the enumerable elements""", - ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -EnumData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py deleted file mode 100644 index 600eb18..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py +++ /dev/null @@ -1,207 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_6_0.hdmf_common_base import Container, Data - -metamodel_version = "None" -version = "0.3.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, - }, - "default_prefix": "hdmf-experimental.resources/", - "id": "hdmf-experimental.resources", - "imports": ["../../hdmf_common/v1_6_0/namespace", "hdmf-experimental.nwb.language"], - "name": "hdmf-experimental.resources", - } -) - - -class ExternalResources(Container): - """ - A set of five tables for tracking external resource references in a file. NOTE: this data type is experimental and is subject to change in a later version. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-experimental.resources", "tree_root": True} - ) - - name: str = Field(...) 
- keys: ExternalResourcesKeys = Field( - ..., - description="""A table for storing user terms that are used to refer to external resources.""", - ) - files: ExternalResourcesFiles = Field( - ..., description="""A table for storing object ids of files used in external resources.""" - ) - entities: ExternalResourcesEntities = Field( - ..., description="""A table for mapping user terms (i.e., keys) to resource entities.""" - ) - objects: ExternalResourcesObjects = Field( - ..., - description="""A table for identifying which objects in a file contain references to external resources.""", - ) - object_keys: ExternalResourcesObjectKeys = Field( - ..., description="""A table for identifying which objects use which keys.""" - ) - - -class ExternalResourcesKeys(Data): - """ - A table for storing user terms that are used to refer to external resources. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) - - name: Literal["keys"] = Field( - "keys", - json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, - ) - key: str = Field( - ..., - description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""", - ) - - -class ExternalResourcesFiles(Data): - """ - A table for storing object ids of files used in external resources. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) - - name: Literal["files"] = Field( - "files", - json_schema_extra={"linkml_meta": {"equals_string": "files", "ifabsent": "string(files)"}}, - ) - file_object_id: str = Field( - ..., - description="""The object id (UUID) of a file that contains objects that refers to external resources.""", - ) - - -class ExternalResourcesEntities(Data): - """ - A table for mapping user terms (i.e., keys) to resource entities. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) - - name: Literal["entities"] = Field( - "entities", - json_schema_extra={ - "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"} - }, - ) - keys_idx: np.uint64 = Field( - ..., description="""The row index to the key in the `keys` table.""" - ) - entity_id: str = Field( - ..., - description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""", - ) - entity_uri: str = Field( - ..., - description="""The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""", - ) - - -class ExternalResourcesObjects(Data): - """ - A table for identifying which objects in a file contain references to external resources. 
- """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) - - name: Literal["objects"] = Field( - "objects", - json_schema_extra={ - "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} - }, - ) - files_idx: np.uint64 = Field( - ..., description="""The row index to the file in the `files` table containing the object.""" - ) - object_id: str = Field(..., description="""The object id (UUID) of the object.""") - object_type: str = Field(..., description="""The data type of the object.""") - relative_path: str = Field( - ..., - description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""", - ) - field: str = Field( - ..., - description="""The field within the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""", - ) - - -class ExternalResourcesObjectKeys(Data): - """ - A table for identifying which objects use which keys. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) - - name: Literal["object_keys"] = Field( - "object_keys", - json_schema_extra={ - "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} - }, - ) - objects_idx: np.uint64 = Field( - ..., description="""The row index to the object in the `objects` table that holds the key""" - ) - keys_idx: np.uint64 = Field( - ..., description="""The row index to the key in the `keys` table.""" - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -ExternalResources.model_rebuild() -ExternalResourcesKeys.model_rebuild() -ExternalResourcesFiles.model_rebuild() -ExternalResourcesEntities.model_rebuild() -ExternalResourcesObjects.model_rebuild() -ExternalResourcesObjectKeys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py deleted file mode 100644 index 8361004..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py +++ /dev/null @@ -1,89 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_experimental.v0_3_0.hdmf_experimental_resources import ( - ExternalResources, - ExternalResourcesKeys, - ExternalResourcesFiles, - ExternalResourcesEntities, - ExternalResourcesObjects, - ExternalResourcesObjectKeys, -) -from ...hdmf_common.v1_6_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData -from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container, SimpleMultiContainer -from ...hdmf_common.v1_6_0.hdmf_common_table import ( - VectorData, - VectorIndex, - ElementIdentifiers, - DynamicTableRegion, - DynamicTable, - AlignedDynamicTable, -) -from ...hdmf_experimental.v0_3_0.hdmf_experimental_experimental import EnumData - -metamodel_version = "None" -version = "0.3.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - 
validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": True}, - "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, - }, - "default_prefix": "hdmf-experimental/", - "description": ( - "Experimental data structures provided by HDMF. These are not " - "guaranteed to be available in the future." - ), - "id": "hdmf-experimental", - "imports": [ - "hdmf-experimental.experimental", - "hdmf-experimental.resources", - "hdmf-experimental.nwb.language", - ], - "name": "hdmf-experimental", - } -) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py deleted file mode 100644 index 14e4d1a..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py +++ /dev/null @@ -1,93 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_7_0.hdmf_common_table import VectorData -from numpydantic import NDArray, Shape - -metamodel_version = "None" -version = "0.4.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -NUMPYDANTIC_VERSION = "1.2.1" -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": 
"namespace", "value": "hdmf-experimental"}, - }, - "default_prefix": "hdmf-experimental.experimental/", - "id": "hdmf-experimental.experimental", - "imports": ["../../hdmf_common/v1_7_0/namespace", "hdmf-experimental.nwb.language"], - "name": "hdmf-experimental.experimental", - } -) - - -class EnumData(VectorData): - """ - Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-experimental.experimental", "tree_root": True} - ) - - name: str = Field(...) - elements: Optional[VectorData] = Field( - None, - description="""Reference to the VectorData object that contains the enumerable elements""", - ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) - array: Optional[ - Union[ - NDArray[Shape["* dim0"], Any], - NDArray[Shape["* dim0, * dim1"], Any], - NDArray[Shape["* dim0, * dim1, * dim2"], Any], - NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], - ] - ] = Field(None) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -EnumData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py deleted file mode 100644 index 6279463..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py +++ /dev/null @@ -1,229 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_common.v1_7_0.hdmf_common_base import Container, Data - -metamodel_version = "None" -version = "0.4.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": False}, - "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, - }, - "default_prefix": "hdmf-experimental.resources/", - "id": "hdmf-experimental.resources", - "imports": ["../../hdmf_common/v1_7_0/namespace", "hdmf-experimental.nwb.language"], - "name": "hdmf-experimental.resources", - } -) - - -class ExternalResources(Container): - """ - A set of five tables for tracking external resource references in a file. NOTE: this data type is experimental and is subject to change in a later version. 
- """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( - {"from_schema": "hdmf-experimental.resources", "tree_root": True} - ) - - name: str = Field(...) - keys: ExternalResourcesKeys = Field( - ..., - description="""A table for storing user terms that are used to refer to external resources.""", - ) - files: ExternalResourcesFiles = Field( - ..., description="""A table for storing object ids of files used in external resources.""" - ) - entities: ExternalResourcesEntities = Field( - ..., description="""A table for mapping user terms (i.e., keys) to resource entities.""" - ) - objects: ExternalResourcesObjects = Field( - ..., - description="""A table for identifying which objects in a file contain references to external resources.""", - ) - object_keys: ExternalResourcesObjectKeys = Field( - ..., description="""A table for identifying which objects use which keys.""" - ) - entity_keys: ExternalResourcesEntityKeys = Field( - ..., description="""A table for identifying which keys use which entity.""" - ) - - -class ExternalResourcesKeys(Data): - """ - A table for storing user terms that are used to refer to external resources. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) - - name: Literal["keys"] = Field( - "keys", - json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, - ) - key: str = Field( - ..., - description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""", - ) - - -class ExternalResourcesFiles(Data): - """ - A table for storing object ids of files used in external resources. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) - - name: Literal["files"] = Field( - "files", - json_schema_extra={"linkml_meta": {"equals_string": "files", "ifabsent": "string(files)"}}, - ) - file_object_id: str = Field( - ..., - description="""The object id (UUID) of a file that contains objects that refers to external resources.""", - ) - - -class ExternalResourcesEntities(Data): - """ - A table for mapping user terms (i.e., keys) to resource entities. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) - - name: Literal["entities"] = Field( - "entities", - json_schema_extra={ - "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"} - }, - ) - entity_id: str = Field( - ..., - description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""", - ) - entity_uri: str = Field( - ..., - description="""The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""", - ) - - -class ExternalResourcesObjects(Data): - """ - A table for identifying which objects in a file contain references to external resources. 
- """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) - - name: Literal["objects"] = Field( - "objects", - json_schema_extra={ - "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} - }, - ) - files_idx: np.uint64 = Field( - ..., description="""The row index to the file in the `files` table containing the object.""" - ) - object_id: str = Field(..., description="""The object id (UUID) of the object.""") - object_type: str = Field(..., description="""The data type of the object.""") - relative_path: str = Field( - ..., - description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""", - ) - field: str = Field( - ..., - description="""The field within the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""", - ) - - -class ExternalResourcesObjectKeys(Data): - """ - A table for identifying which objects use which keys. - """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) - - name: Literal["object_keys"] = Field( - "object_keys", - json_schema_extra={ - "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} - }, - ) - objects_idx: np.uint64 = Field( - ..., description="""The row index to the object in the `objects` table that holds the key""" - ) - keys_idx: np.uint64 = Field( - ..., description="""The row index to the key in the `keys` table.""" - ) - - -class ExternalResourcesEntityKeys(Data): - """ - A table for identifying which keys use which entity. 
- """ - - linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) - - name: Literal["entity_keys"] = Field( - "entity_keys", - json_schema_extra={ - "linkml_meta": {"equals_string": "entity_keys", "ifabsent": "string(entity_keys)"} - }, - ) - entities_idx: np.uint64 = Field( - ..., description="""The row index to the entity in the `entities` table.""" - ) - keys_idx: np.uint64 = Field( - ..., description="""The row index to the key in the `keys` table.""" - ) - - -# Model rebuild -# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -ExternalResources.model_rebuild() -ExternalResourcesKeys.model_rebuild() -ExternalResourcesFiles.model_rebuild() -ExternalResourcesEntities.model_rebuild() -ExternalResourcesObjects.model_rebuild() -ExternalResourcesObjectKeys.model_rebuild() -ExternalResourcesEntityKeys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py deleted file mode 100644 index c642308..0000000 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py +++ /dev/null @@ -1,90 +0,0 @@ -from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator -import numpy as np -from ...hdmf_experimental.v0_4_0.hdmf_experimental_resources import ( - ExternalResources, - ExternalResourcesKeys, - ExternalResourcesFiles, - ExternalResourcesEntities, - ExternalResourcesObjects, - ExternalResourcesObjectKeys, - ExternalResourcesEntityKeys, -) -from ...hdmf_common.v1_7_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData -from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container, SimpleMultiContainer -from ...hdmf_common.v1_7_0.hdmf_common_table import ( - VectorData, - VectorIndex, - ElementIdentifiers, - DynamicTableRegion, - DynamicTable, - AlignedDynamicTable, -) -from ...hdmf_experimental.v0_4_0.hdmf_experimental_experimental import EnumData - -metamodel_version = "None" -version = "0.4.0" - - -class ConfiguredBaseModel(BaseModel): - model_config = ConfigDict( - validate_assignment=True, - validate_default=True, - extra="forbid", - arbitrary_types_allowed=True, - use_enum_values=True, - strict=False, - ) - hdf5_path: Optional[str] = Field( - None, description="The absolute path that this object is stored in an NWB file" - ) - object_id: Optional[str] = Field(None, description="Unique UUID for each object") - - -class LinkMLMeta(RootModel): - root: Dict[str, Any] = {} - model_config = ConfigDict(frozen=True) - - def __getattr__(self, key: str): - return getattr(self.root, key) - - def __getitem__(self, key: str): - return self.root[key] - - def __setitem__(self, key: str, value): - self.root[key] = value - - def __contains__(self, key: str) -> bool: - return key in self.root - - -linkml_meta = LinkMLMeta( - { - "annotations": { - "is_namespace": {"tag": "is_namespace", "value": True}, - "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, - }, - "default_prefix": "hdmf-experimental/", - "description": ( - "Experimental data structures provided by HDMF. These are not " - "guaranteed to be available in the future." 
-        ),
-        "id": "hdmf-experimental",
-        "imports": [
-            "hdmf-experimental.experimental",
-            "hdmf-experimental.resources",
-            "hdmf-experimental.nwb.language",
-        ],
-        "name": "hdmf-experimental",
-    }
-)
-
-
-# Model rebuild
-# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py
index cde0cca..8c33e35 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py
@@ -161,7 +161,7 @@ class HERDObjects(Data):
             "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"}
         },
     )
-    files_idx: np.uint64 = Field(
+    files_idx: int = Field(
         ..., description="""The row index to the file in the `files` table containing the object."""
     )
     object_id: str = Field(..., description="""The object id (UUID) of the object.""")
@@ -189,12 +189,10 @@ class HERDObjectKeys(Data):
             "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"}
         },
     )
-    objects_idx: np.uint64 = Field(
+    objects_idx: int = Field(
         ..., description="""The row index to the object in the `objects` table that holds the key"""
     )
-    keys_idx: np.uint64 = Field(
-        ..., description="""The row index to the key in the `keys` table."""
-    )
+    keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""")
 
 
 class HERDEntityKeys(Data):
@@ -210,12 +208,10 @@ class HERDEntityKeys(Data):
             "linkml_meta": {"equals_string": "entity_keys", "ifabsent": "string(entity_keys)"}
         },
     )
-    entities_idx: np.uint64 = Field(
+    entities_idx: int = Field(
         ..., description="""The row index to the entity in the `entities` table."""
     )
-    keys_idx: np.uint64 = Field(
-        ..., description="""The row index to the key in the `keys` table."""
-    )
+    keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""")
 
 
 # Model rebuild
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.language.yaml
index f48262a..e42c742 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.language.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.language.yaml
@@ -19,67 +19,53 @@ types:
   float32:
     name: float32
     typeof: float
-    repr: np.float32
   float64:
     name: float64
     typeof: double
-    repr: np.float64
   long:
     name: long
     typeof: integer
-    repr: np.longlong
   int64:
     name: int64
     typeof: integer
-    repr: np.int64
   int:
     name: int
     typeof: integer
   int32:
     name: int32
     typeof: integer
-    repr: np.int32
   int16:
     name: int16
     typeof: integer
-    repr: np.int16
   short:
     name: short
     typeof: integer
-    repr: np.int16
   int8:
     name: int8
     typeof: integer
-    repr: np.int8
   uint:
     name: uint
     typeof: integer
-    repr: np.uint64
     minimum_value: 0
   uint32:
     name: uint32
     typeof: integer
-    repr: np.uint32
     minimum_value: 0
   uint16:
     name: uint16
     typeof: integer
-    repr: np.uint16
     minimum_value: 0
   uint8:
     name: uint8
     typeof: integer
-    repr: np.uint8
    minimum_value: 0
   uint64:
     name: uint64
     typeof: integer
-    repr: np.uint64
     minimum_value: 0
   numeric:
     name: numeric
     typeof: float
-    repr: np.number
   text:
     name: text
     typeof: string
@@ -101,7 +87,6 @@ types:
   isodatetime:
     name: isodatetime
     typeof: datetime
-    repr: np.datetime64
 classes:
   AnyType:
     name: AnyType
diff --git
a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.language.yaml index f48262a..e42c742 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.language.yaml @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.language.yaml index f48262a..e42c742 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.language.yaml @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.language.yaml index f48262a..e42c742 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.language.yaml @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: 
short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.language.yaml index f48262a..e42c742 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.language.yaml @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.language.yaml index f48262a..e42c742 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.language.yaml @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType diff --git 
a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.language.yaml index f48262a..e42c742 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.language.yaml @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.language.yaml index f48262a..e42c742 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.language.yaml @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml index f48262a..e42c742 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.language.yaml @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: 
np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.language.yaml index f48262a..e42c742 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.language.yaml @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml index 35776f9..c7a8401 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: hdmf-common + value: core description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 
numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml index 35776f9..c7a8401 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: hdmf-common + value: core description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml index 35776f9..c7a8401 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: hdmf-common + value: core description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 
classes: AnyType: name: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml deleted file mode 100644 index ff30beb..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml +++ /dev/null @@ -1,33 +0,0 @@ -name: hdmf-common.base -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.base -version: 1.2.0 -imports: -- hdmf-common.nwb.language -default_prefix: hdmf-common.base/ -classes: - Data: - name: Data - description: An abstract data type for a dataset. - attributes: - name: - name: name - range: string - required: true - tree_root: true - Container: - name: Container - description: An abstract data type for a group storing collections of data and - metadata. Base type for all data and metadata containers. - attributes: - name: - name: name - range: string - required: true - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml deleted file mode 100644 index 35776f9..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml +++ /dev/null @@ -1,109 +0,0 @@ -name: hdmf-common.nwb.language -annotations: - is_namespace: - tag: is_namespace - value: 'False' - namespace: - tag: namespace - value: hdmf-common -description: Adapter objects to mimic the behavior of elements in the nwb-schema-language -id: nwb.language -imports: -- linkml:types -prefixes: - linkml: - prefix_prefix: linkml - prefix_reference: https://w3id.org/linkml -default_prefix: nwb.language/ -types: - float32: - name: float32 - typeof: float - repr: np.float32 - float64: - name: float64 - typeof: double - repr: np.float64 - long: - name: long - typeof: integer - repr: np.longlong - int64: - name: int64 - typeof: integer - repr: np.int64 - int: - name: int - typeof: integer - int32: - name: int32 - typeof: integer - repr: np.int32 - int16: - name: int16 - typeof: integer - repr: np.int16 - short: - name: short - typeof: integer - repr: np.int16 - int8: - name: int8 - typeof: integer - repr: np.int8 - uint: - name: uint - typeof: integer - repr: np.uint64 - minimum_value: 0 - uint32: - name: uint32 - typeof: integer - repr: np.uint32 - minimum_value: 0 - uint16: - name: uint16 - typeof: integer - repr: np.uint16 - minimum_value: 0 - uint8: - name: uint8 - typeof: integer - repr: np.uint8 - minimum_value: 0 - uint64: - name: uint64 - typeof: integer - repr: np.uint64 - minimum_value: 0 - numeric: - name: numeric - typeof: float - repr: np.number - text: - name: text - typeof: string - utf: - name: utf - typeof: string - utf8: - name: utf8 - typeof: string - utf_8: - name: utf_8 - typeof: string - ascii: - name: ascii - typeof: string - bool: - name: bool - typeof: boolean - isodatetime: - name: isodatetime - typeof: datetime - repr: np.datetime64 -classes: - AnyType: - name: AnyType - description: Needed because some classes in hdmf-common are datasets without dtype - class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml deleted file mode 100644 index 2d9616c..0000000 --- 
a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml +++ /dev/null @@ -1,75 +0,0 @@ -name: hdmf-common.sparse -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.sparse -version: 1.2.0 -imports: -- hdmf-common.nwb.language -default_prefix: hdmf-common.sparse/ -classes: - CSRMatrix: - name: CSRMatrix - description: a compressed sparse row matrix - attributes: - name: - name: name - range: string - required: true - shape: - name: shape - description: the shape of this sparse matrix - range: int - indices: - name: indices - description: column indices - range: CSRMatrix__indices - required: true - multivalued: false - indptr: - name: indptr - description: index pointer - range: CSRMatrix__indptr - required: true - multivalued: false - data: - name: data - description: values in the matrix - range: CSRMatrix__data - required: true - multivalued: false - tree_root: true - CSRMatrix__indices: - name: CSRMatrix__indices - description: column indices - attributes: - name: - name: name - ifabsent: string(indices) - range: string - required: true - equals_string: indices - CSRMatrix__indptr: - name: CSRMatrix__indptr - description: index pointer - attributes: - name: - name: name - ifabsent: string(indptr) - range: string - required: true - equals_string: indptr - CSRMatrix__data: - name: CSRMatrix__data - description: values in the matrix - attributes: - name: - name: name - ifabsent: string(data) - range: string - required: true - equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml deleted file mode 100644 index accfb99..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml +++ /dev/null @@ -1,181 +0,0 @@ -name: hdmf-common.table -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.table -version: 1.2.0 -imports: -- hdmf-common.base -- hdmf-common.nwb.language -default_prefix: hdmf-common.table/ -classes: - VectorData: - name: VectorData - description: An n-dimensional dataset representing a column of a DynamicTable. - If used without an accompanying VectorIndex, first dimension is along the rows - of the DynamicTable and each step along the first dimension is a cell of the - larger table. VectorData can also be used to represent a ragged array if paired - with a VectorIndex. This allows for storing arrays of varying length in a single - cell of the DynamicTable by indexing into this VectorData. The first vector - is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], - and so on. - is_a: Data - attributes: - name: - name: name - range: string - required: true - description: - name: description - description: Description of what these vectors represent. - range: text - array: - name: array - range: AnyType - any_of: - - array: - dimensions: - - alias: dim0 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - alias: dim2 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - alias: dim2 - - alias: dim3 - tree_root: true - VectorIndex: - name: VectorIndex - description: Used with VectorData to encode a ragged array. 
An array of indices - into the first dimension of the target VectorData, and forming a map between - the rows of a DynamicTable and the indices of the VectorData. The name of the - VectorIndex is expected to be the name of the target VectorData object followed - by "_index". - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - target: - name: target - description: Reference to the target dataset that this index applies to. - range: VectorData - tree_root: true - ElementIdentifiers: - name: ElementIdentifiers - description: A list of unique identifiers for values within a dataset, e.g. rows - of a DynamicTable. - is_a: Data - attributes: - name: - name: name - ifabsent: string(element_id) - range: string - required: true - tree_root: true - DynamicTableRegion: - name: DynamicTableRegion - description: DynamicTableRegion provides a link from one table to an index or - region of another. The `table` attribute is a link to another `DynamicTable`, - indicating which table is referenced, and the data is int(s) indicating the - row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to - associate rows with repeated meta-data without data duplication. They can also - be used to create hierarchical relationships between multiple `DynamicTable`s. - `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create - ragged references, so a single cell of a `DynamicTable` can reference many rows - of another `DynamicTable`. - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - table: - name: table - description: Reference to the DynamicTable object that this region applies - to. - range: DynamicTable - description: - name: description - description: Description of what this table region points to. - range: text - tree_root: true - VocabData: - name: VocabData - description: Data that come from a controlled vocabulary of text values. A data - value of i corresponds to the i-th element in the 'vocabulary' array attribute. - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - vocabulary: - name: vocabulary - description: The available items in the controlled vocabulary. - range: text - tree_root: true - DynamicTable: - name: DynamicTable - description: A group containing multiple datasets that are aligned on the first - dimension (Currently, this requirement if left up to APIs to check and enforce). - These datasets represent different columns in the table. Apart from a column - that contains unique identifiers for each row, there are no other required datasets. - Users are free to add any number of custom VectorData objects (columns) here. - DynamicTable also supports ragged array columns, where each element can be of - a different size. To add a ragged array column, use a VectorIndex type to index - the corresponding VectorData type. See documentation for VectorData and VectorIndex - for more details. Unlike a compound data type, which is analogous to storing - an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. - This provides an alternative structure to choose from when optimizing storage - for anticipated access patterns. Additionally, this type provides a way of creating - a table without having to define a compound type up front. Although this convenience - may be attractive, users should think carefully about how data will be accessed. 
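The slicing rule in the VectorData description above fully determines how a ragged column is unpacked: the first vector is `VectorData[0:VectorIndex[0]]`, the second is `VectorData[VectorIndex[0]:VectorIndex[1]]`, and so on. A minimal pure-Python sketch of that rule — plain lists stand in for the datasets, and the names and values are illustrative, not part of the schema or the generated models:

```python
# Sketch of the ragged-array indexing rule described for VectorData/VectorIndex.
data = ["a", "b", "c", "d", "e", "f"]  # VectorData: concatenated cell contents
index = [2, 3, 6]                      # VectorIndex: end offset of each row's slice

def row(i: int) -> list:
    """Return the ragged cell for row i: data[index[i-1]:index[i]], with start 0 for i == 0."""
    start = index[i - 1] if i > 0 else 0
    return data[start:index[i]]

assert row(0) == ["a", "b"]       # VectorData[0:VectorIndex[0]]
assert row(1) == ["c"]            # VectorData[VectorIndex[0]:VectorIndex[1]]
assert row(2) == ["d", "e", "f"]  # and so on
```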
- DynamicTable is more appropriate for column-centric access, whereas a dataset - with a compound type would be more appropriate for row-centric access. Finally, - data size should also be taken into account. For small tables, performance loss - may be an acceptable trade-off for the flexibility of a DynamicTable. - is_a: Container - attributes: - name: - name: name - range: string - required: true - colnames: - name: colnames - description: The names of the columns in this table. This should be used to - specify an order to the columns. - range: text - description: - name: description - description: Description of what is in this dynamic table. - range: text - id: - name: id - description: Array of unique identifiers for the rows of this dynamic table. - array: - dimensions: - - alias: num_rows - range: int - required: true - multivalued: false - vector_data: - name: vector_data - description: Vector columns, including index columns, of this dynamic table. - range: VectorData - required: false - multivalued: true - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml deleted file mode 100644 index 7befc87..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml +++ /dev/null @@ -1,17 +0,0 @@ -name: hdmf-common -annotations: - is_namespace: - tag: is_namespace - value: true - namespace: - tag: namespace - value: hdmf-common -description: Common data structures provided by HDMF -id: hdmf-common -version: 1.2.0 -imports: -- hdmf-common.base -- hdmf-common.table -- hdmf-common.sparse -- hdmf-common.nwb.language -default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml deleted file mode 100644 index 17f8013..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml +++ /dev/null @@ -1,46 +0,0 @@ -name: hdmf-common.base -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.base -version: 1.2.1 -imports: -- hdmf-common.nwb.language -default_prefix: hdmf-common.base/ -classes: - Data: - name: Data - description: An abstract data type for a dataset. - attributes: - name: - name: name - range: string - required: true - tree_root: true - Container: - name: Container - description: An abstract data type for a group storing collections of data and - metadata. Base type for all data and metadata containers. 
- attributes: - name: - name: name - range: string - required: true - tree_root: true - SimpleMultiContainer: - name: SimpleMultiContainer - description: A simple Container for holding onto multiple containers - is_a: Container - attributes: - children: - name: children - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: Container - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml deleted file mode 100644 index 35776f9..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml +++ /dev/null @@ -1,109 +0,0 @@ -name: hdmf-common.nwb.language -annotations: - is_namespace: - tag: is_namespace - value: 'False' - namespace: - tag: namespace - value: hdmf-common -description: Adapter objects to mimic the behavior of elements in the nwb-schema-language -id: nwb.language -imports: -- linkml:types -prefixes: - linkml: - prefix_prefix: linkml - prefix_reference: https://w3id.org/linkml -default_prefix: nwb.language/ -types: - float32: - name: float32 - typeof: float - repr: np.float32 - float64: - name: float64 - typeof: double - repr: np.float64 - long: - name: long - typeof: integer - repr: np.longlong - int64: - name: int64 - typeof: integer - repr: np.int64 - int: - name: int - typeof: integer - int32: - name: int32 - typeof: integer - repr: np.int32 - int16: - name: int16 - typeof: integer - repr: np.int16 - short: - name: short - typeof: integer - repr: np.int16 - int8: - name: int8 - typeof: integer - repr: np.int8 - uint: - name: uint - typeof: integer - repr: np.uint64 - minimum_value: 0 - uint32: - name: uint32 - typeof: integer - repr: np.uint32 - minimum_value: 0 - uint16: - name: uint16 - typeof: integer - repr: np.uint16 - minimum_value: 0 - uint8: - name: uint8 - typeof: integer - repr: np.uint8 - minimum_value: 0 - uint64: - name: uint64 - typeof: integer - repr: np.uint64 - minimum_value: 0 - numeric: - name: numeric - typeof: float - repr: np.number - text: - name: text - typeof: string - utf: - name: utf - typeof: string - utf8: - name: utf8 - typeof: string - utf_8: - name: utf_8 - typeof: string - ascii: - name: ascii - typeof: string - bool: - name: bool - typeof: boolean - isodatetime: - name: isodatetime - typeof: datetime - repr: np.datetime64 -classes: - AnyType: - name: AnyType - description: Needed because some classes in hdmf-common are datasets without dtype - class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml deleted file mode 100644 index b480dbe..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml +++ /dev/null @@ -1,77 +0,0 @@ -name: hdmf-common.sparse -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.sparse -version: 1.2.1 -imports: -- hdmf-common.base -- hdmf-common.nwb.language -default_prefix: hdmf-common.sparse/ -classes: - CSRMatrix: - name: CSRMatrix - description: a compressed sparse row matrix - is_a: Container - attributes: - name: - name: name - range: string - required: true - shape: - name: shape - description: the shape of this sparse matrix - range: int - indices: - name: indices - description: column indices - range: CSRMatrix__indices - 
required: true - multivalued: false - indptr: - name: indptr - description: index pointer - range: CSRMatrix__indptr - required: true - multivalued: false - data: - name: data - description: values in the matrix - range: CSRMatrix__data - required: true - multivalued: false - tree_root: true - CSRMatrix__indices: - name: CSRMatrix__indices - description: column indices - attributes: - name: - name: name - ifabsent: string(indices) - range: string - required: true - equals_string: indices - CSRMatrix__indptr: - name: CSRMatrix__indptr - description: index pointer - attributes: - name: - name: name - ifabsent: string(indptr) - range: string - required: true - equals_string: indptr - CSRMatrix__data: - name: CSRMatrix__data - description: values in the matrix - attributes: - name: - name: name - ifabsent: string(data) - range: string - required: true - equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml deleted file mode 100644 index 4b3b3ef..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml +++ /dev/null @@ -1,181 +0,0 @@ -name: hdmf-common.table -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.table -version: 1.2.1 -imports: -- hdmf-common.base -- hdmf-common.nwb.language -default_prefix: hdmf-common.table/ -classes: - VectorData: - name: VectorData - description: An n-dimensional dataset representing a column of a DynamicTable. - If used without an accompanying VectorIndex, first dimension is along the rows - of the DynamicTable and each step along the first dimension is a cell of the - larger table. VectorData can also be used to represent a ragged array if paired - with a VectorIndex. This allows for storing arrays of varying length in a single - cell of the DynamicTable by indexing into this VectorData. The first vector - is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], - and so on. - is_a: Data - attributes: - name: - name: name - range: string - required: true - description: - name: description - description: Description of what these vectors represent. - range: text - array: - name: array - range: AnyType - any_of: - - array: - dimensions: - - alias: dim0 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - alias: dim2 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - alias: dim2 - - alias: dim3 - tree_root: true - VectorIndex: - name: VectorIndex - description: Used with VectorData to encode a ragged array. An array of indices - into the first dimension of the target VectorData, and forming a map between - the rows of a DynamicTable and the indices of the VectorData. The name of the - VectorIndex is expected to be the name of the target VectorData object followed - by "_index". - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - target: - name: target - description: Reference to the target dataset that this index applies to. - range: VectorData - tree_root: true - ElementIdentifiers: - name: ElementIdentifiers - description: A list of unique identifiers for values within a dataset, e.g. rows - of a DynamicTable. 
- is_a: Data - attributes: - name: - name: name - ifabsent: string(element_id) - range: string - required: true - tree_root: true - DynamicTableRegion: - name: DynamicTableRegion - description: DynamicTableRegion provides a link from one table to an index or - region of another. The `table` attribute is a link to another `DynamicTable`, - indicating which table is referenced, and the data is int(s) indicating the - row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to - associate rows with repeated meta-data without data duplication. They can also - be used to create hierarchical relationships between multiple `DynamicTable`s. - `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create - ragged references, so a single cell of a `DynamicTable` can reference many rows - of another `DynamicTable`. - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - table: - name: table - description: Reference to the DynamicTable object that this region applies - to. - range: DynamicTable - description: - name: description - description: Description of what this table region points to. - range: text - tree_root: true - VocabData: - name: VocabData - description: Data that come from a controlled vocabulary of text values. A data - value of i corresponds to the i-th element in the 'vocabulary' array attribute. - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - vocabulary: - name: vocabulary - description: The available items in the controlled vocabulary. - range: text - tree_root: true - DynamicTable: - name: DynamicTable - description: A group containing multiple datasets that are aligned on the first - dimension (Currently, this requirement if left up to APIs to check and enforce). - These datasets represent different columns in the table. Apart from a column - that contains unique identifiers for each row, there are no other required datasets. - Users are free to add any number of custom VectorData objects (columns) here. - DynamicTable also supports ragged array columns, where each element can be of - a different size. To add a ragged array column, use a VectorIndex type to index - the corresponding VectorData type. See documentation for VectorData and VectorIndex - for more details. Unlike a compound data type, which is analogous to storing - an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. - This provides an alternative structure to choose from when optimizing storage - for anticipated access patterns. Additionally, this type provides a way of creating - a table without having to define a compound type up front. Although this convenience - may be attractive, users should think carefully about how data will be accessed. - DynamicTable is more appropriate for column-centric access, whereas a dataset - with a compound type would be more appropriate for row-centric access. Finally, - data size should also be taken into account. For small tables, performance loss - may be an acceptable trade-off for the flexibility of a DynamicTable. - is_a: Container - attributes: - name: - name: name - range: string - required: true - colnames: - name: colnames - description: The names of the columns in this table. This should be used to - specify an order to the columns. - range: text - description: - name: description - description: Description of what is in this dynamic table. 
- range: text - id: - name: id - description: Array of unique identifiers for the rows of this dynamic table. - array: - dimensions: - - alias: num_rows - range: int - required: true - multivalued: false - vector_data: - name: vector_data - description: Vector columns, including index columns, of this dynamic table. - range: VectorData - required: false - multivalued: true - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml deleted file mode 100644 index e29bfb9..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml +++ /dev/null @@ -1,17 +0,0 @@ -name: hdmf-common -annotations: - is_namespace: - tag: is_namespace - value: true - namespace: - tag: namespace - value: hdmf-common -description: Common data structures provided by HDMF -id: hdmf-common -version: 1.2.1 -imports: -- hdmf-common.base -- hdmf-common.table -- hdmf-common.sparse -- hdmf-common.nwb.language -default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml deleted file mode 100644 index 22efa9c..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml +++ /dev/null @@ -1,46 +0,0 @@ -name: hdmf-common.base -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.base -version: 1.3.0 -imports: -- hdmf-common.nwb.language -default_prefix: hdmf-common.base/ -classes: - Data: - name: Data - description: An abstract data type for a dataset. - attributes: - name: - name: name - range: string - required: true - tree_root: true - Container: - name: Container - description: An abstract data type for a group storing collections of data and - metadata. Base type for all data and metadata containers. - attributes: - name: - name: name - range: string - required: true - tree_root: true - SimpleMultiContainer: - name: SimpleMultiContainer - description: A simple Container for holding onto multiple containers. 
- is_a: Container - attributes: - children: - name: children - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: Container - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml deleted file mode 100644 index 35776f9..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml +++ /dev/null @@ -1,109 +0,0 @@ -name: hdmf-common.nwb.language -annotations: - is_namespace: - tag: is_namespace - value: 'False' - namespace: - tag: namespace - value: hdmf-common -description: Adapter objects to mimic the behavior of elements in the nwb-schema-language -id: nwb.language -imports: -- linkml:types -prefixes: - linkml: - prefix_prefix: linkml - prefix_reference: https://w3id.org/linkml -default_prefix: nwb.language/ -types: - float32: - name: float32 - typeof: float - repr: np.float32 - float64: - name: float64 - typeof: double - repr: np.float64 - long: - name: long - typeof: integer - repr: np.longlong - int64: - name: int64 - typeof: integer - repr: np.int64 - int: - name: int - typeof: integer - int32: - name: int32 - typeof: integer - repr: np.int32 - int16: - name: int16 - typeof: integer - repr: np.int16 - short: - name: short - typeof: integer - repr: np.int16 - int8: - name: int8 - typeof: integer - repr: np.int8 - uint: - name: uint - typeof: integer - repr: np.uint64 - minimum_value: 0 - uint32: - name: uint32 - typeof: integer - repr: np.uint32 - minimum_value: 0 - uint16: - name: uint16 - typeof: integer - repr: np.uint16 - minimum_value: 0 - uint8: - name: uint8 - typeof: integer - repr: np.uint8 - minimum_value: 0 - uint64: - name: uint64 - typeof: integer - repr: np.uint64 - minimum_value: 0 - numeric: - name: numeric - typeof: float - repr: np.number - text: - name: text - typeof: string - utf: - name: utf - typeof: string - utf8: - name: utf8 - typeof: string - utf_8: - name: utf_8 - typeof: string - ascii: - name: ascii - typeof: string - bool: - name: bool - typeof: boolean - isodatetime: - name: isodatetime - typeof: datetime - repr: np.datetime64 -classes: - AnyType: - name: AnyType - description: Needed because some classes in hdmf-common are datasets without dtype - class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml deleted file mode 100644 index 918a6a5..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml +++ /dev/null @@ -1,158 +0,0 @@ -name: hdmf-common.resources -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.resources -version: 1.3.0 -imports: -- hdmf-common.base -- hdmf-common.nwb.language -default_prefix: hdmf-common.resources/ -classes: - ExternalResources: - name: ExternalResources - description: 'A set of four tables for tracking external resource references in - a file. NOTE: this data type is in beta testing and is subject to change in - a later version.' - is_a: Container - attributes: - name: - name: name - range: string - required: true - keys: - name: keys - description: A table for storing user terms that are used to refer to external - resources. 
- range: ExternalResources__keys - required: true - multivalued: false - resources: - name: resources - description: A table for mapping user terms (i.e., keys) to resource entities. - range: ExternalResources__resources - required: true - multivalued: false - objects: - name: objects - description: A table for identifying which objects in a file contain references - to external resources. - range: ExternalResources__objects - required: true - multivalued: false - object_keys: - name: object_keys - description: A table for identifying which objects use which keys. - range: ExternalResources__object_keys - required: true - multivalued: false - tree_root: true - ExternalResources__keys: - name: ExternalResources__keys - description: A table for storing user terms that are used to refer to external - resources. - is_a: Data - attributes: - name: - name: name - ifabsent: string(keys) - range: string - required: true - equals_string: keys - key_name: - name: key_name - description: The user term that maps to one or more resources in the 'resources' - table. - range: text - required: true - multivalued: false - ExternalResources__resources: - name: ExternalResources__resources - description: A table for mapping user terms (i.e., keys) to resource entities. - is_a: Data - attributes: - name: - name: name - ifabsent: string(resources) - range: string - required: true - equals_string: resources - keytable_idx: - name: keytable_idx - description: The index to the key in the 'keys' table. - range: uint - required: true - multivalued: false - resource_name: - name: resource_name - description: The name of the online resource (e.g., website, database) that - has the entity. - range: text - required: true - multivalued: false - resource_id: - name: resource_id - description: The unique identifier for the resource entity at the resource. - range: text - required: true - multivalued: false - uri: - name: uri - description: The URI for the resource entity this reference applies to. This - can be an empty string. - range: text - required: true - multivalued: false - ExternalResources__objects: - name: ExternalResources__objects - description: A table for identifying which objects in a file contain references - to external resources. - is_a: Data - attributes: - name: - name: name - ifabsent: string(objects) - range: string - required: true - equals_string: objects - object_id: - name: object_id - description: The UUID for the object. - range: text - required: true - multivalued: false - field: - name: field - description: The field of the object. This can be an empty string if the object - is a dataset and the field is the dataset values. - range: text - required: true - multivalued: false - ExternalResources__object_keys: - name: ExternalResources__object_keys - description: A table for identifying which objects use which keys. - is_a: Data - attributes: - name: - name: name - ifabsent: string(object_keys) - range: string - required: true - equals_string: object_keys - objecttable_idx: - name: objecttable_idx - description: The index to the 'objects' table for the object that holds the - key. - range: uint - required: true - multivalued: false - keytable_idx: - name: keytable_idx - description: The index to the 'keys' table for the key. 
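The four ExternalResources tables above reference each other purely by row index: `object_keys` joins a row of `objects` to a row of `keys`, and `resources` points back at `keys` via `keytable_idx`. A small sketch of one lookup chain under that reading — dicts of lists stand in for the datasets, and all sample values are invented:

```python
# Resolve which external resources each (object, field) pair references.
keys = {"key_name": ["mouse"]}
resources = {
    "keytable_idx": [0],
    "resource_name": ["NCBI Taxonomy"],
    "resource_id": ["10090"],
    "uri": ["https://www.ncbi.nlm.nih.gov/taxonomy/10090"],
}
objects = {"object_id": ["<uuid>"], "field": ["species"]}
object_keys = {"objecttable_idx": [0], "keytable_idx": [0]}

for obj_row, key_row in zip(object_keys["objecttable_idx"], object_keys["keytable_idx"]):
    term = keys["key_name"][key_row]
    # every resources row whose keytable_idx points at this key
    hits = [i for i, k in enumerate(resources["keytable_idx"]) if k == key_row]
    for i in hits:
        print(objects["object_id"][obj_row], objects["field"][obj_row], term,
              "->", resources["resource_name"][i], resources["resource_id"][i])
```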
- range: uint - required: true - multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml deleted file mode 100644 index 264e15d..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml +++ /dev/null @@ -1,66 +0,0 @@ -name: hdmf-common.sparse -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.sparse -version: 1.3.0 -imports: -- hdmf-common.base -- hdmf-common.nwb.language -default_prefix: hdmf-common.sparse/ -classes: - CSRMatrix: - name: CSRMatrix - description: A compressed sparse row matrix. Data are stored in the standard CSR - format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] - and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. - is_a: Container - attributes: - name: - name: name - range: string - required: true - shape: - name: shape - description: The shape (number of rows, number of columns) of this sparse - matrix. - range: uint - indices: - name: indices - description: The column indices. - array: - dimensions: - - alias: number_of_non_zero_values - range: uint - required: true - multivalued: false - indptr: - name: indptr - description: The row index pointer. - array: - dimensions: - - alias: number_of_rows_in_the_matrix_1 - range: uint - required: true - multivalued: false - data: - name: data - description: The non-zero values in the matrix. - range: CSRMatrix__data - required: true - multivalued: false - tree_root: true - CSRMatrix__data: - name: CSRMatrix__data - description: The non-zero values in the matrix. - attributes: - name: - name: name - ifabsent: string(data) - range: string - required: true - equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml deleted file mode 100644 index 478c18d..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml +++ /dev/null @@ -1,181 +0,0 @@ -name: hdmf-common.table -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.table -version: 1.3.0 -imports: -- hdmf-common.base -- hdmf-common.nwb.language -default_prefix: hdmf-common.table/ -classes: - VectorData: - name: VectorData - description: An n-dimensional dataset representing a column of a DynamicTable. - If used without an accompanying VectorIndex, first dimension is along the rows - of the DynamicTable and each step along the first dimension is a cell of the - larger table. VectorData can also be used to represent a ragged array if paired - with a VectorIndex. This allows for storing arrays of varying length in a single - cell of the DynamicTable by indexing into this VectorData. The first vector - is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], - and so on. - is_a: Data - attributes: - name: - name: name - range: string - required: true - description: - name: description - description: Description of what these vectors represent. 
- range: text - array: - name: array - range: AnyType - any_of: - - array: - dimensions: - - alias: dim0 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - alias: dim2 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - alias: dim2 - - alias: dim3 - tree_root: true - VectorIndex: - name: VectorIndex - description: Used with VectorData to encode a ragged array. An array of indices - into the first dimension of the target VectorData, and forming a map between - the rows of a DynamicTable and the indices of the VectorData. The name of the - VectorIndex is expected to be the name of the target VectorData object followed - by "_index". - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - target: - name: target - description: Reference to the target dataset that this index applies to. - range: VectorData - tree_root: true - ElementIdentifiers: - name: ElementIdentifiers - description: A list of unique identifiers for values within a dataset, e.g. rows - of a DynamicTable. - is_a: Data - attributes: - name: - name: name - ifabsent: string(element_id) - range: string - required: true - tree_root: true - DynamicTableRegion: - name: DynamicTableRegion - description: DynamicTableRegion provides a link from one table to an index or - region of another. The `table` attribute is a link to another `DynamicTable`, - indicating which table is referenced, and the data is int(s) indicating the - row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to - associate rows with repeated meta-data without data duplication. They can also - be used to create hierarchical relationships between multiple `DynamicTable`s. - `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create - ragged references, so a single cell of a `DynamicTable` can reference many rows - of another `DynamicTable`. - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - table: - name: table - description: Reference to the DynamicTable object that this region applies - to. - range: DynamicTable - description: - name: description - description: Description of what this table region points to. - range: text - tree_root: true - VocabData: - name: VocabData - description: Data that come from a controlled vocabulary of text values. A data - value of i corresponds to the i-th element in the 'vocabulary' array attribute. - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - vocabulary: - name: vocabulary - description: The available items in the controlled vocabulary. - range: text - tree_root: true - DynamicTable: - name: DynamicTable - description: A group containing multiple datasets that are aligned on the first - dimension (Currently, this requirement if left up to APIs to check and enforce). - These datasets represent different columns in the table. Apart from a column - that contains unique identifiers for each row, there are no other required datasets. - Users are free to add any number of custom VectorData objects (columns) here. - DynamicTable also supports ragged array columns, where each element can be of - a different size. To add a ragged array column, use a VectorIndex type to index - the corresponding VectorData type. See documentation for VectorData and VectorIndex - for more details. 
Unlike a compound data type, which is analogous to storing - an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. - This provides an alternative structure to choose from when optimizing storage - for anticipated access patterns. Additionally, this type provides a way of creating - a table without having to define a compound type up front. Although this convenience - may be attractive, users should think carefully about how data will be accessed. - DynamicTable is more appropriate for column-centric access, whereas a dataset - with a compound type would be more appropriate for row-centric access. Finally, - data size should also be taken into account. For small tables, performance loss - may be an acceptable trade-off for the flexibility of a DynamicTable. - is_a: Container - attributes: - name: - name: name - range: string - required: true - colnames: - name: colnames - description: The names of the columns in this table. This should be used to - specify an order to the columns. - range: text - description: - name: description - description: Description of what is in this dynamic table. - range: text - id: - name: id - description: Array of unique identifiers for the rows of this dynamic table. - array: - dimensions: - - alias: num_rows - range: int - required: true - multivalued: false - vector_data: - name: vector_data - description: Vector columns, including index columns, of this dynamic table. - range: VectorData - required: false - multivalued: true - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml deleted file mode 100644 index 11885e7..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml +++ /dev/null @@ -1,18 +0,0 @@ -name: hdmf-common -annotations: - is_namespace: - tag: is_namespace - value: true - namespace: - tag: namespace - value: hdmf-common -description: Common data structures provided by HDMF -id: hdmf-common -version: 1.3.0 -imports: -- hdmf-common.base -- hdmf-common.table -- hdmf-common.sparse -- hdmf-common.resources -- hdmf-common.nwb.language -default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml deleted file mode 100644 index ef51ebd..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml +++ /dev/null @@ -1,46 +0,0 @@ -name: hdmf-common.base -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.base -version: 1.4.0 -imports: -- hdmf-common.nwb.language -default_prefix: hdmf-common.base/ -classes: - Data: - name: Data - description: An abstract data type for a dataset. - attributes: - name: - name: name - range: string - required: true - tree_root: true - Container: - name: Container - description: An abstract data type for a group storing collections of data and - metadata. Base type for all data and metadata containers. - attributes: - name: - name: name - range: string - required: true - tree_root: true - SimpleMultiContainer: - name: SimpleMultiContainer - description: A simple Container for holding onto multiple containers. 
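The array-of-structs vs. struct-of-arrays contrast drawn in the DynamicTable description can be made concrete in a few lines. The data and column names here are invented, and plain Python lists stand in for the HDF5 datasets:

```python
# The access-pattern trade-off from the DynamicTable description, in miniature.

# Array-of-structs (compound type): one record per row, so row access is one read.
aos = [{"id": 0, "x": 1.0, "label": "a"},
       {"id": 1, "x": 2.0, "label": "b"}]
assert aos[1]["x"] == 2.0                    # row-centric access: cheap

# Struct-of-arrays (DynamicTable): one dataset per column, so column access is one read.
soa = {"id": [0, 1], "x": [1.0, 2.0], "label": ["a", "b"]}
assert soa["x"] == [1.0, 2.0]                # column-centric access: cheap
assert {c: v[1] for c, v in soa.items()} == aos[1]  # row-centric: touches every column
```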
- is_a: Container - attributes: - children: - name: children - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: Container - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml deleted file mode 100644 index 98c00e4..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml +++ /dev/null @@ -1,109 +0,0 @@ -name: hdmf-common.nwb.language -annotations: - is_namespace: - tag: is_namespace - value: 'False' - namespace: - tag: namespace - value: hdmf-experimental -description: Adapter objects to mimic the behavior of elements in the nwb-schema-language -id: nwb.language -imports: -- linkml:types -prefixes: - linkml: - prefix_prefix: linkml - prefix_reference: https://w3id.org/linkml -default_prefix: nwb.language/ -types: - float32: - name: float32 - typeof: float - repr: np.float32 - float64: - name: float64 - typeof: double - repr: np.float64 - long: - name: long - typeof: integer - repr: np.longlong - int64: - name: int64 - typeof: integer - repr: np.int64 - int: - name: int - typeof: integer - int32: - name: int32 - typeof: integer - repr: np.int32 - int16: - name: int16 - typeof: integer - repr: np.int16 - short: - name: short - typeof: integer - repr: np.int16 - int8: - name: int8 - typeof: integer - repr: np.int8 - uint: - name: uint - typeof: integer - repr: np.uint64 - minimum_value: 0 - uint32: - name: uint32 - typeof: integer - repr: np.uint32 - minimum_value: 0 - uint16: - name: uint16 - typeof: integer - repr: np.uint16 - minimum_value: 0 - uint8: - name: uint8 - typeof: integer - repr: np.uint8 - minimum_value: 0 - uint64: - name: uint64 - typeof: integer - repr: np.uint64 - minimum_value: 0 - numeric: - name: numeric - typeof: float - repr: np.number - text: - name: text - typeof: string - utf: - name: utf - typeof: string - utf8: - name: utf8 - typeof: string - utf_8: - name: utf_8 - typeof: string - ascii: - name: ascii - typeof: string - bool: - name: bool - typeof: boolean - isodatetime: - name: isodatetime - typeof: datetime - repr: np.datetime64 -classes: - AnyType: - name: AnyType - description: Needed because some classes in hdmf-common are datasets without dtype - class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml deleted file mode 100644 index a004e10..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml +++ /dev/null @@ -1,66 +0,0 @@ -name: hdmf-common.sparse -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.sparse -version: 1.4.0 -imports: -- hdmf-common.base -- hdmf-common.nwb.language -default_prefix: hdmf-common.sparse/ -classes: - CSRMatrix: - name: CSRMatrix - description: A compressed sparse row matrix. Data are stored in the standard CSR - format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] - and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. - is_a: Container - attributes: - name: - name: name - range: string - required: true - shape: - name: shape - description: The shape (number of rows, number of columns) of this sparse - matrix. 
- range: uint - indices: - name: indices - description: The column indices. - array: - dimensions: - - alias: number_of_non_zero_values - range: uint - required: true - multivalued: false - indptr: - name: indptr - description: The row index pointer. - array: - dimensions: - - alias: number_of_rows_in_the_matrix_1 - range: uint - required: true - multivalued: false - data: - name: data - description: The non-zero values in the matrix. - range: CSRMatrix__data - required: true - multivalued: false - tree_root: true - CSRMatrix__data: - name: CSRMatrix__data - description: The non-zero values in the matrix. - attributes: - name: - name: name - ifabsent: string(data) - range: string - required: true - equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml deleted file mode 100644 index ca9ead2..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml +++ /dev/null @@ -1,166 +0,0 @@ -name: hdmf-common.table -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.table -version: 1.4.0 -imports: -- hdmf-common.base -- hdmf-common.nwb.language -default_prefix: hdmf-common.table/ -classes: - VectorData: - name: VectorData - description: An n-dimensional dataset representing a column of a DynamicTable. - If used without an accompanying VectorIndex, first dimension is along the rows - of the DynamicTable and each step along the first dimension is a cell of the - larger table. VectorData can also be used to represent a ragged array if paired - with a VectorIndex. This allows for storing arrays of varying length in a single - cell of the DynamicTable by indexing into this VectorData. The first vector - is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], - and so on. - is_a: Data - attributes: - name: - name: name - range: string - required: true - description: - name: description - description: Description of what these vectors represent. - range: text - array: - name: array - range: AnyType - any_of: - - array: - dimensions: - - alias: dim0 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - alias: dim2 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - alias: dim2 - - alias: dim3 - tree_root: true - VectorIndex: - name: VectorIndex - description: Used with VectorData to encode a ragged array. An array of indices - into the first dimension of the target VectorData, and forming a map between - the rows of a DynamicTable and the indices of the VectorData. The name of the - VectorIndex is expected to be the name of the target VectorData object followed - by "_index". - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - target: - name: target - description: Reference to the target dataset that this index applies to. - range: VectorData - tree_root: true - ElementIdentifiers: - name: ElementIdentifiers - description: A list of unique identifiers for values within a dataset, e.g. rows - of a DynamicTable. - is_a: Data - attributes: - name: - name: name - ifabsent: string(element_id) - range: string - required: true - tree_root: true - DynamicTableRegion: - name: DynamicTableRegion - description: DynamicTableRegion provides a link from one table to an index or - region of another. 
The `table` attribute is a link to another `DynamicTable`, - indicating which table is referenced, and the data is int(s) indicating the - row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to - associate rows with repeated meta-data without data duplication. They can also - be used to create hierarchical relationships between multiple `DynamicTable`s. - `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create - ragged references, so a single cell of a `DynamicTable` can reference many rows - of another `DynamicTable`. - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - table: - name: table - description: Reference to the DynamicTable object that this region applies - to. - range: DynamicTable - description: - name: description - description: Description of what this table region points to. - range: text - tree_root: true - DynamicTable: - name: DynamicTable - description: A group containing multiple datasets that are aligned on the first - dimension (Currently, this requirement if left up to APIs to check and enforce). - These datasets represent different columns in the table. Apart from a column - that contains unique identifiers for each row, there are no other required datasets. - Users are free to add any number of custom VectorData objects (columns) here. - DynamicTable also supports ragged array columns, where each element can be of - a different size. To add a ragged array column, use a VectorIndex type to index - the corresponding VectorData type. See documentation for VectorData and VectorIndex - for more details. Unlike a compound data type, which is analogous to storing - an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. - This provides an alternative structure to choose from when optimizing storage - for anticipated access patterns. Additionally, this type provides a way of creating - a table without having to define a compound type up front. Although this convenience - may be attractive, users should think carefully about how data will be accessed. - DynamicTable is more appropriate for column-centric access, whereas a dataset - with a compound type would be more appropriate for row-centric access. Finally, - data size should also be taken into account. For small tables, performance loss - may be an acceptable trade-off for the flexibility of a DynamicTable. - is_a: Container - attributes: - name: - name: name - range: string - required: true - colnames: - name: colnames - description: The names of the columns in this table. This should be used to - specify an order to the columns. - range: text - description: - name: description - description: Description of what is in this dynamic table. - range: text - id: - name: id - description: Array of unique identifiers for the rows of this dynamic table. - array: - dimensions: - - alias: num_rows - range: int - required: true - multivalued: false - vector_data: - name: vector_data - description: Vector columns, including index columns, of this dynamic table. 
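As the DynamicTableRegion description above states, the region's data is just integers naming 0-indexed rows of the target table, which lets rows be shared without duplicating their metadata. A sketch with invented data, where a dict of lists plays the role of the linked DynamicTable:

```python
# Sketch of DynamicTableRegion semantics: integer row indices into a target table.
table = {"id": [0, 1, 2, 3], "label": ["a", "b", "c", "d"]}  # target DynamicTable
region = [1, 3, 3]  # DynamicTableRegion data: 0-indexed rows, repeats allowed

selected = [table["label"][i] for i in region]
assert selected == ["b", "d", "d"]  # rows reused without duplicating the metadata
```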
- range: VectorData - required: false - multivalued: true - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml deleted file mode 100644 index 50680da..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml +++ /dev/null @@ -1,17 +0,0 @@ -name: hdmf-common -annotations: - is_namespace: - tag: is_namespace - value: true - namespace: - tag: namespace - value: hdmf-common -description: Common data structures provided by HDMF -id: hdmf-common -version: 1.4.0 -imports: -- hdmf-common.base -- hdmf-common.table -- hdmf-common.sparse -- hdmf-common.nwb.language -default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml index 98c00e4..c7a8401 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: hdmf-experimental + value: core description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml deleted file mode 100644 index 75e5a6c..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml +++ /dev/null @@ -1,46 +0,0 @@ -name: hdmf-common.base -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.base -version: 1.5.1 -imports: -- hdmf-common.nwb.language -default_prefix: hdmf-common.base/ -classes: - Data: - name: Data - description: An abstract data type for a dataset. - attributes: - name: - name: name - range: string - required: true - tree_root: true - Container: - name: Container - description: An abstract data type for a group storing collections of data and - metadata. Base type for all data and metadata containers. 
- attributes: - name: - name: name - range: string - required: true - tree_root: true - SimpleMultiContainer: - name: SimpleMultiContainer - description: A simple Container for holding onto multiple containers. - is_a: Container - attributes: - children: - name: children - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: Container - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml deleted file mode 100644 index 98c00e4..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml +++ /dev/null @@ -1,109 +0,0 @@ -name: hdmf-common.nwb.language -annotations: - is_namespace: - tag: is_namespace - value: 'False' - namespace: - tag: namespace - value: hdmf-experimental -description: Adapter objects to mimic the behavior of elements in the nwb-schema-language -id: nwb.language -imports: -- linkml:types -prefixes: - linkml: - prefix_prefix: linkml - prefix_reference: https://w3id.org/linkml -default_prefix: nwb.language/ -types: - float32: - name: float32 - typeof: float - repr: np.float32 - float64: - name: float64 - typeof: double - repr: np.float64 - long: - name: long - typeof: integer - repr: np.longlong - int64: - name: int64 - typeof: integer - repr: np.int64 - int: - name: int - typeof: integer - int32: - name: int32 - typeof: integer - repr: np.int32 - int16: - name: int16 - typeof: integer - repr: np.int16 - short: - name: short - typeof: integer - repr: np.int16 - int8: - name: int8 - typeof: integer - repr: np.int8 - uint: - name: uint - typeof: integer - repr: np.uint64 - minimum_value: 0 - uint32: - name: uint32 - typeof: integer - repr: np.uint32 - minimum_value: 0 - uint16: - name: uint16 - typeof: integer - repr: np.uint16 - minimum_value: 0 - uint8: - name: uint8 - typeof: integer - repr: np.uint8 - minimum_value: 0 - uint64: - name: uint64 - typeof: integer - repr: np.uint64 - minimum_value: 0 - numeric: - name: numeric - typeof: float - repr: np.number - text: - name: text - typeof: string - utf: - name: utf - typeof: string - utf8: - name: utf8 - typeof: string - utf_8: - name: utf_8 - typeof: string - ascii: - name: ascii - typeof: string - bool: - name: bool - typeof: boolean - isodatetime: - name: isodatetime - typeof: datetime - repr: np.datetime64 -classes: - AnyType: - name: AnyType - description: Needed because some classes in hdmf-common are datasets without dtype - class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml deleted file mode 100644 index 380bc7e..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml +++ /dev/null @@ -1,66 +0,0 @@ -name: hdmf-common.sparse -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.sparse -version: 1.5.1 -imports: -- hdmf-common.base -- hdmf-common.nwb.language -default_prefix: hdmf-common.sparse/ -classes: - CSRMatrix: - name: CSRMatrix - description: A compressed sparse row matrix. Data are stored in the standard CSR - format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] - and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. 
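The CSRMatrix description spells the layout out exactly: for row i, `indices[indptr[i]:indptr[i+1]]` holds the column indices and `data[indptr[i]:indptr[i+1]]` holds the corresponding values, with `indptr` one element longer than the number of rows. A direct pure-Python rendering of that rule, using an invented 3x3 example:

```python
# Example matrix (invented):
#   [[10,  0, 20],
#    [ 0,  0,  0],
#    [ 0, 30,  0]]
indptr = [0, 2, 2, 3]   # length == number of rows + 1
indices = [0, 2, 1]     # length == number_of_non_zero_values
data = [10, 20, 30]

def row(i: int) -> dict:
    """Map column index -> value for row i, per the CSR layout above."""
    lo, hi = indptr[i], indptr[i + 1]
    return dict(zip(indices[lo:hi], data[lo:hi]))

assert row(0) == {0: 10, 2: 20}
assert row(1) == {}          # empty row: indptr[1] == indptr[2]
assert row(2) == {1: 30}
```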
- is_a: Container - attributes: - name: - name: name - range: string - required: true - shape: - name: shape - description: The shape (number of rows, number of columns) of this sparse - matrix. - range: uint - indices: - name: indices - description: The column indices. - array: - dimensions: - - alias: number_of_non_zero_values - range: uint - required: true - multivalued: false - indptr: - name: indptr - description: The row index pointer. - array: - dimensions: - - alias: number_of_rows_in_the_matrix_1 - range: uint - required: true - multivalued: false - data: - name: data - description: The non-zero values in the matrix. - range: CSRMatrix__data - required: true - multivalued: false - tree_root: true - CSRMatrix__data: - name: CSRMatrix__data - description: The non-zero values in the matrix. - attributes: - name: - name: name - ifabsent: string(data) - range: string - required: true - equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml deleted file mode 100644 index 557721d..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml +++ /dev/null @@ -1,185 +0,0 @@ -name: hdmf-common.table -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.table -version: 1.5.1 -imports: -- hdmf-common.base -- hdmf-common.nwb.language -default_prefix: hdmf-common.table/ -classes: - VectorData: - name: VectorData - description: An n-dimensional dataset representing a column of a DynamicTable. - If used without an accompanying VectorIndex, first dimension is along the rows - of the DynamicTable and each step along the first dimension is a cell of the - larger table. VectorData can also be used to represent a ragged array if paired - with a VectorIndex. This allows for storing arrays of varying length in a single - cell of the DynamicTable by indexing into this VectorData. The first vector - is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], - and so on. - is_a: Data - attributes: - name: - name: name - range: string - required: true - description: - name: description - description: Description of what these vectors represent. - range: text - array: - name: array - range: AnyType - any_of: - - array: - dimensions: - - alias: dim0 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - alias: dim2 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - alias: dim2 - - alias: dim3 - tree_root: true - VectorIndex: - name: VectorIndex - description: Used with VectorData to encode a ragged array. An array of indices - into the first dimension of the target VectorData, and forming a map between - the rows of a DynamicTable and the indices of the VectorData. The name of the - VectorIndex is expected to be the name of the target VectorData object followed - by "_index". - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - target: - name: target - description: Reference to the target dataset that this index applies to. - range: VectorData - tree_root: true - ElementIdentifiers: - name: ElementIdentifiers - description: A list of unique identifiers for values within a dataset, e.g. rows - of a DynamicTable. 
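The ragged-array convention described for `VectorData`/`VectorIndex` above (the first vector is `VectorData[0:VectorIndex[0]]`, the i-th is `VectorData[VectorIndex[i-1]:VectorIndex[i]]`) is worth a worked example. A toy sketch in plain numpy, independent of the generated classes; `get_row` is an illustrative helper, not part of any API:

```python
import numpy as np

data = np.array([1, 2, 3, 4, 5, 6])  # flat VectorData column
index = np.array([2, 3, 6])          # VectorIndex: exclusive end of each row

def get_row(i: int) -> np.ndarray:
    """Slice row i out of the flat data using the cumulative index."""
    start = 0 if i == 0 else index[i - 1]
    return data[start : index[i]]

assert get_row(0).tolist() == [1, 2]     # data[0:2]
assert get_row(1).tolist() == [3]        # data[2:3]
assert get_row(2).tolist() == [4, 5, 6]  # data[3:6]
```

This is also the mechanism `DynamicTable` (below) relies on for ragged columns: a column `x` paired with an `x_index` column stores variable-length cells in a single flat dataset.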
- is_a: Data - attributes: - name: - name: name - ifabsent: string(element_id) - range: string - required: true - tree_root: true - DynamicTableRegion: - name: DynamicTableRegion - description: DynamicTableRegion provides a link from one table to an index or - region of another. The `table` attribute is a link to another `DynamicTable`, - indicating which table is referenced, and the data is int(s) indicating the - row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to - associate rows with repeated meta-data without data duplication. They can also - be used to create hierarchical relationships between multiple `DynamicTable`s. - `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create - ragged references, so a single cell of a `DynamicTable` can reference many rows - of another `DynamicTable`. - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - table: - name: table - description: Reference to the DynamicTable object that this region applies - to. - range: DynamicTable - description: - name: description - description: Description of what this table region points to. - range: text - tree_root: true - DynamicTable: - name: DynamicTable - description: A group containing multiple datasets that are aligned on the first - dimension (Currently, this requirement is left up to APIs to check and enforce). - These datasets represent different columns in the table. Apart from a column - that contains unique identifiers for each row, there are no other required datasets. - Users are free to add any number of custom VectorData objects (columns) here. - DynamicTable also supports ragged array columns, where each element can be of - a different size. To add a ragged array column, use a VectorIndex type to index - the corresponding VectorData type. See documentation for VectorData and VectorIndex - for more details. Unlike a compound data type, which is analogous to storing - an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. - This provides an alternative structure to choose from when optimizing storage - for anticipated access patterns. Additionally, this type provides a way of creating - a table without having to define a compound type up front. Although this convenience - may be attractive, users should think carefully about how data will be accessed. - DynamicTable is more appropriate for column-centric access, whereas a dataset - with a compound type would be more appropriate for row-centric access. Finally, - data size should also be taken into account. For small tables, performance loss - may be an acceptable trade-off for the flexibility of a DynamicTable. - is_a: Container - attributes: - name: - name: name - range: string - required: true - colnames: - name: colnames - description: The names of the columns in this table. This should be used to - specify an order to the columns. - range: text - description: - name: description - description: Description of what is in this dynamic table. - range: text - id: - name: id - description: Array of unique identifiers for the rows of this dynamic table. - array: - dimensions: - - alias: num_rows - range: int - required: true - multivalued: false - vector_data: - name: vector_data - description: Vector columns, including index columns, of this dynamic table.
- range: VectorData - required: false - multivalued: true - tree_root: true - AlignedDynamicTable: - name: AlignedDynamicTable - description: DynamicTable container that supports storing a collection of sub-tables. - Each sub-table is a DynamicTable itself that is aligned with the main table - by row index. I.e., all DynamicTables stored in this group MUST have the same - number of rows. This type effectively defines a 2-level table in which the main - data is stored in the main table implemented by this type and additional columns - of the table are grouped into categories, with each category being represented - by a separate DynamicTable stored within the group. - is_a: DynamicTable - attributes: - children: - name: children - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: DynamicTable - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml deleted file mode 100644 index 917870d..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml +++ /dev/null @@ -1,17 +0,0 @@ -name: hdmf-common -annotations: - is_namespace: - tag: is_namespace - value: true - namespace: - tag: namespace - value: hdmf-common -description: Common data structures provided by HDMF -id: hdmf-common -version: 1.5.1 -imports: -- hdmf-common.base -- hdmf-common.table -- hdmf-common.sparse -- hdmf-common.nwb.language -default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml deleted file mode 100644 index 7031d84..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml +++ /dev/null @@ -1,46 +0,0 @@ -name: hdmf-common.base -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.base -version: 1.6.0 -imports: -- hdmf-common.nwb.language -default_prefix: hdmf-common.base/ -classes: - Data: - name: Data - description: An abstract data type for a dataset. - attributes: - name: - name: name - range: string - required: true - tree_root: true - Container: - name: Container - description: An abstract data type for a group storing collections of data and - metadata. Base type for all data and metadata containers. - attributes: - name: - name: name - range: string - required: true - tree_root: true - SimpleMultiContainer: - name: SimpleMultiContainer - description: A simple Container for holding onto multiple containers. 
- is_a: Container - attributes: - children: - name: children - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: Container - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml deleted file mode 100644 index 98c00e4..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml +++ /dev/null @@ -1,109 +0,0 @@ -name: hdmf-common.nwb.language -annotations: - is_namespace: - tag: is_namespace - value: 'False' - namespace: - tag: namespace - value: hdmf-experimental -description: Adapter objects to mimic the behavior of elements in the nwb-schema-language -id: nwb.language -imports: -- linkml:types -prefixes: - linkml: - prefix_prefix: linkml - prefix_reference: https://w3id.org/linkml -default_prefix: nwb.language/ -types: - float32: - name: float32 - typeof: float - repr: np.float32 - float64: - name: float64 - typeof: double - repr: np.float64 - long: - name: long - typeof: integer - repr: np.longlong - int64: - name: int64 - typeof: integer - repr: np.int64 - int: - name: int - typeof: integer - int32: - name: int32 - typeof: integer - repr: np.int32 - int16: - name: int16 - typeof: integer - repr: np.int16 - short: - name: short - typeof: integer - repr: np.int16 - int8: - name: int8 - typeof: integer - repr: np.int8 - uint: - name: uint - typeof: integer - repr: np.uint64 - minimum_value: 0 - uint32: - name: uint32 - typeof: integer - repr: np.uint32 - minimum_value: 0 - uint16: - name: uint16 - typeof: integer - repr: np.uint16 - minimum_value: 0 - uint8: - name: uint8 - typeof: integer - repr: np.uint8 - minimum_value: 0 - uint64: - name: uint64 - typeof: integer - repr: np.uint64 - minimum_value: 0 - numeric: - name: numeric - typeof: float - repr: np.number - text: - name: text - typeof: string - utf: - name: utf - typeof: string - utf8: - name: utf8 - typeof: string - utf_8: - name: utf_8 - typeof: string - ascii: - name: ascii - typeof: string - bool: - name: bool - typeof: boolean - isodatetime: - name: isodatetime - typeof: datetime - repr: np.datetime64 -classes: - AnyType: - name: AnyType - description: Needed because some classes in hdmf-common are datasets without dtype - class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml deleted file mode 100644 index e2e8cff..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml +++ /dev/null @@ -1,66 +0,0 @@ -name: hdmf-common.sparse -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.sparse -version: 1.6.0 -imports: -- hdmf-common.base -- hdmf-common.nwb.language -default_prefix: hdmf-common.sparse/ -classes: - CSRMatrix: - name: CSRMatrix - description: A compressed sparse row matrix. Data are stored in the standard CSR - format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] - and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. - is_a: Container - attributes: - name: - name: name - range: string - required: true - shape: - name: shape - description: The shape (number of rows, number of columns) of this sparse - matrix. 
- range: uint - indices: - name: indices - description: The column indices. - array: - dimensions: - - alias: number_of_non_zero_values - range: uint - required: true - multivalued: false - indptr: - name: indptr - description: The row index pointer. - array: - dimensions: - - alias: number_of_rows_in_the_matrix_1 - range: uint - required: true - multivalued: false - data: - name: data - description: The non-zero values in the matrix. - range: CSRMatrix__data - required: true - multivalued: false - tree_root: true - CSRMatrix__data: - name: CSRMatrix__data - description: The non-zero values in the matrix. - attributes: - name: - name: name - ifabsent: string(data) - range: string - required: true - equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml deleted file mode 100644 index 100ea47..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml +++ /dev/null @@ -1,185 +0,0 @@ -name: hdmf-common.table -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.table -version: 1.6.0 -imports: -- hdmf-common.base -- hdmf-common.nwb.language -default_prefix: hdmf-common.table/ -classes: - VectorData: - name: VectorData - description: An n-dimensional dataset representing a column of a DynamicTable. - If used without an accompanying VectorIndex, first dimension is along the rows - of the DynamicTable and each step along the first dimension is a cell of the - larger table. VectorData can also be used to represent a ragged array if paired - with a VectorIndex. This allows for storing arrays of varying length in a single - cell of the DynamicTable by indexing into this VectorData. The first vector - is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], - and so on. - is_a: Data - attributes: - name: - name: name - range: string - required: true - description: - name: description - description: Description of what these vectors represent. - range: text - array: - name: array - range: AnyType - any_of: - - array: - dimensions: - - alias: dim0 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - alias: dim2 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - alias: dim2 - - alias: dim3 - tree_root: true - VectorIndex: - name: VectorIndex - description: Used with VectorData to encode a ragged array. An array of indices - into the first dimension of the target VectorData, and forming a map between - the rows of a DynamicTable and the indices of the VectorData. The name of the - VectorIndex is expected to be the name of the target VectorData object followed - by "_index". - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - target: - name: target - description: Reference to the target dataset that this index applies to. - range: VectorData - tree_root: true - ElementIdentifiers: - name: ElementIdentifiers - description: A list of unique identifiers for values within a dataset, e.g. rows - of a DynamicTable. - is_a: Data - attributes: - name: - name: name - ifabsent: string(element_id) - range: string - required: true - tree_root: true - DynamicTableRegion: - name: DynamicTableRegion - description: DynamicTableRegion provides a link from one table to an index or - region of another. 
The `table` attribute is a link to another `DynamicTable`, - indicating which table is referenced, and the data is int(s) indicating the - row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to - associate rows with repeated meta-data without data duplication. They can also - be used to create hierarchical relationships between multiple `DynamicTable`s. - `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create - ragged references, so a single cell of a `DynamicTable` can reference many rows - of another `DynamicTable`. - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - table: - name: table - description: Reference to the DynamicTable object that this region applies - to. - range: DynamicTable - description: - name: description - description: Description of what this table region points to. - range: text - tree_root: true - DynamicTable: - name: DynamicTable - description: A group containing multiple datasets that are aligned on the first - dimension (Currently, this requirement is left up to APIs to check and enforce). - These datasets represent different columns in the table. Apart from a column - that contains unique identifiers for each row, there are no other required datasets. - Users are free to add any number of custom VectorData objects (columns) here. - DynamicTable also supports ragged array columns, where each element can be of - a different size. To add a ragged array column, use a VectorIndex type to index - the corresponding VectorData type. See documentation for VectorData and VectorIndex - for more details. Unlike a compound data type, which is analogous to storing - an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. - This provides an alternative structure to choose from when optimizing storage - for anticipated access patterns. Additionally, this type provides a way of creating - a table without having to define a compound type up front. Although this convenience - may be attractive, users should think carefully about how data will be accessed. - DynamicTable is more appropriate for column-centric access, whereas a dataset - with a compound type would be more appropriate for row-centric access. Finally, - data size should also be taken into account. For small tables, performance loss - may be an acceptable trade-off for the flexibility of a DynamicTable. - is_a: Container - attributes: - name: - name: name - range: string - required: true - colnames: - name: colnames - description: The names of the columns in this table. This should be used to - specify an order to the columns. - range: text - description: - name: description - description: Description of what is in this dynamic table. - range: text - id: - name: id - description: Array of unique identifiers for the rows of this dynamic table. - array: - dimensions: - - alias: num_rows - range: int - required: true - multivalued: false - vector_data: - name: vector_data - description: Vector columns, including index columns, of this dynamic table. - range: VectorData - required: false - multivalued: true - tree_root: true - AlignedDynamicTable: - name: AlignedDynamicTable - description: DynamicTable container that supports storing a collection of sub-tables. - Each sub-table is a DynamicTable itself that is aligned with the main table - by row index. I.e., all DynamicTables stored in this group MUST have the same - number of rows.
This type effectively defines a 2-level table in which the main - data is stored in the main table implemented by this type and additional columns - of the table are grouped into categories, with each category being represented - by a separate DynamicTable stored within the group. - is_a: DynamicTable - attributes: - children: - name: children - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: DynamicTable - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml deleted file mode 100644 index 241b849..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml +++ /dev/null @@ -1,17 +0,0 @@ -name: hdmf-common -annotations: - is_namespace: - tag: is_namespace - value: true - namespace: - tag: namespace - value: hdmf-common -description: Common data structures provided by HDMF -id: hdmf-common -version: 1.6.0 -imports: -- hdmf-common.base -- hdmf-common.table -- hdmf-common.sparse -- hdmf-common.nwb.language -default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml deleted file mode 100644 index acf8a08..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml +++ /dev/null @@ -1,46 +0,0 @@ -name: hdmf-common.base -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.base -version: 1.7.0 -imports: -- hdmf-common.nwb.language -default_prefix: hdmf-common.base/ -classes: - Data: - name: Data - description: An abstract data type for a dataset. - attributes: - name: - name: name - range: string - required: true - tree_root: true - Container: - name: Container - description: An abstract data type for a group storing collections of data and - metadata. Base type for all data and metadata containers. - attributes: - name: - name: name - range: string - required: true - tree_root: true - SimpleMultiContainer: - name: SimpleMultiContainer - description: A simple Container for holding onto multiple containers. 
- is_a: Container - attributes: - children: - name: children - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: Container - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml deleted file mode 100644 index 98c00e4..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml +++ /dev/null @@ -1,109 +0,0 @@ -name: hdmf-common.nwb.language -annotations: - is_namespace: - tag: is_namespace - value: 'False' - namespace: - tag: namespace - value: hdmf-experimental -description: Adapter objects to mimic the behavior of elements in the nwb-schema-language -id: nwb.language -imports: -- linkml:types -prefixes: - linkml: - prefix_prefix: linkml - prefix_reference: https://w3id.org/linkml -default_prefix: nwb.language/ -types: - float32: - name: float32 - typeof: float - repr: np.float32 - float64: - name: float64 - typeof: double - repr: np.float64 - long: - name: long - typeof: integer - repr: np.longlong - int64: - name: int64 - typeof: integer - repr: np.int64 - int: - name: int - typeof: integer - int32: - name: int32 - typeof: integer - repr: np.int32 - int16: - name: int16 - typeof: integer - repr: np.int16 - short: - name: short - typeof: integer - repr: np.int16 - int8: - name: int8 - typeof: integer - repr: np.int8 - uint: - name: uint - typeof: integer - repr: np.uint64 - minimum_value: 0 - uint32: - name: uint32 - typeof: integer - repr: np.uint32 - minimum_value: 0 - uint16: - name: uint16 - typeof: integer - repr: np.uint16 - minimum_value: 0 - uint8: - name: uint8 - typeof: integer - repr: np.uint8 - minimum_value: 0 - uint64: - name: uint64 - typeof: integer - repr: np.uint64 - minimum_value: 0 - numeric: - name: numeric - typeof: float - repr: np.number - text: - name: text - typeof: string - utf: - name: utf - typeof: string - utf8: - name: utf8 - typeof: string - utf_8: - name: utf_8 - typeof: string - ascii: - name: ascii - typeof: string - bool: - name: bool - typeof: boolean - isodatetime: - name: isodatetime - typeof: datetime - repr: np.datetime64 -classes: - AnyType: - name: AnyType - description: Needed because some classes in hdmf-common are datasets without dtype - class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml deleted file mode 100644 index e258d51..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml +++ /dev/null @@ -1,66 +0,0 @@ -name: hdmf-common.sparse -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.sparse -version: 1.7.0 -imports: -- hdmf-common.base -- hdmf-common.nwb.language -default_prefix: hdmf-common.sparse/ -classes: - CSRMatrix: - name: CSRMatrix - description: A compressed sparse row matrix. Data are stored in the standard CSR - format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] - and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. - is_a: Container - attributes: - name: - name: name - range: string - required: true - shape: - name: shape - description: The shape (number of rows, number of columns) of this sparse - matrix. 
- range: uint - indices: - name: indices - description: The column indices. - array: - dimensions: - - alias: number_of_non_zero_values - range: uint - required: true - multivalued: false - indptr: - name: indptr - description: The row index pointer. - array: - dimensions: - - alias: number_of_rows_in_the_matrix_1 - range: uint - required: true - multivalued: false - data: - name: data - description: The non-zero values in the matrix. - range: CSRMatrix__data - required: true - multivalued: false - tree_root: true - CSRMatrix__data: - name: CSRMatrix__data - description: The non-zero values in the matrix. - attributes: - name: - name: name - ifabsent: string(data) - range: string - required: true - equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml deleted file mode 100644 index 3dba25e..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml +++ /dev/null @@ -1,185 +0,0 @@ -name: hdmf-common.table -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-common -id: hdmf-common.table -version: 1.7.0 -imports: -- hdmf-common.base -- hdmf-common.nwb.language -default_prefix: hdmf-common.table/ -classes: - VectorData: - name: VectorData - description: An n-dimensional dataset representing a column of a DynamicTable. - If used without an accompanying VectorIndex, first dimension is along the rows - of the DynamicTable and each step along the first dimension is a cell of the - larger table. VectorData can also be used to represent a ragged array if paired - with a VectorIndex. This allows for storing arrays of varying length in a single - cell of the DynamicTable by indexing into this VectorData. The first vector - is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], - and so on. - is_a: Data - attributes: - name: - name: name - range: string - required: true - description: - name: description - description: Description of what these vectors represent. - range: text - array: - name: array - range: AnyType - any_of: - - array: - dimensions: - - alias: dim0 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - alias: dim2 - - array: - dimensions: - - alias: dim0 - - alias: dim1 - - alias: dim2 - - alias: dim3 - tree_root: true - VectorIndex: - name: VectorIndex - description: Used with VectorData to encode a ragged array. An array of indices - into the first dimension of the target VectorData, and forming a map between - the rows of a DynamicTable and the indices of the VectorData. The name of the - VectorIndex is expected to be the name of the target VectorData object followed - by "_index". - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - target: - name: target - description: Reference to the target dataset that this index applies to. - range: VectorData - tree_root: true - ElementIdentifiers: - name: ElementIdentifiers - description: A list of unique identifiers for values within a dataset, e.g. rows - of a DynamicTable. - is_a: Data - attributes: - name: - name: name - ifabsent: string(element_id) - range: string - required: true - tree_root: true - DynamicTableRegion: - name: DynamicTableRegion - description: DynamicTableRegion provides a link from one table to an index or - region of another. 
The `table` attribute is a link to another `DynamicTable`, - indicating which table is referenced, and the data is int(s) indicating the - row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to - associate rows with repeated meta-data without data duplication. They can also - be used to create hierarchical relationships between multiple `DynamicTable`s. - `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create - ragged references, so a single cell of a `DynamicTable` can reference many rows - of another `DynamicTable`. - is_a: VectorData - attributes: - name: - name: name - range: string - required: true - table: - name: table - description: Reference to the DynamicTable object that this region applies - to. - range: DynamicTable - description: - name: description - description: Description of what this table region points to. - range: text - tree_root: true - DynamicTable: - name: DynamicTable - description: A group containing multiple datasets that are aligned on the first - dimension (Currently, this requirement is left up to APIs to check and enforce). - These datasets represent different columns in the table. Apart from a column - that contains unique identifiers for each row, there are no other required datasets. - Users are free to add any number of custom VectorData objects (columns) here. - DynamicTable also supports ragged array columns, where each element can be of - a different size. To add a ragged array column, use a VectorIndex type to index - the corresponding VectorData type. See documentation for VectorData and VectorIndex - for more details. Unlike a compound data type, which is analogous to storing - an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. - This provides an alternative structure to choose from when optimizing storage - for anticipated access patterns. Additionally, this type provides a way of creating - a table without having to define a compound type up front. Although this convenience - may be attractive, users should think carefully about how data will be accessed. - DynamicTable is more appropriate for column-centric access, whereas a dataset - with a compound type would be more appropriate for row-centric access. Finally, - data size should also be taken into account. For small tables, performance loss - may be an acceptable trade-off for the flexibility of a DynamicTable. - is_a: Container - attributes: - name: - name: name - range: string - required: true - colnames: - name: colnames - description: The names of the columns in this table. This should be used to - specify an order to the columns. - range: text - description: - name: description - description: Description of what is in this dynamic table. - range: text - id: - name: id - description: Array of unique identifiers for the rows of this dynamic table. - array: - dimensions: - - alias: num_rows - range: int - required: true - multivalued: false - vector_data: - name: vector_data - description: Vector columns, including index columns, of this dynamic table. - range: VectorData - required: false - multivalued: true - tree_root: true - AlignedDynamicTable: - name: AlignedDynamicTable - description: DynamicTable container that supports storing a collection of sub-tables. - Each sub-table is a DynamicTable itself that is aligned with the main table - by row index. I.e., all DynamicTables stored in this group MUST have the same - number of rows.
This type effectively defines a 2-level table in which the main - data is stored in the main table implemented by this type and additional columns - of the table are grouped into categories, with each category being represented - by a separate DynamicTable stored within the group. - is_a: DynamicTable - attributes: - children: - name: children - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: DynamicTable - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml deleted file mode 100644 index b689554..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml +++ /dev/null @@ -1,17 +0,0 @@ -name: hdmf-common -annotations: - is_namespace: - tag: is_namespace - value: true - namespace: - tag: namespace - value: hdmf-common -description: Common data structures provided by HDMF -id: hdmf-common -version: 1.7.0 -imports: -- hdmf-common.base -- hdmf-common.table -- hdmf-common.sparse -- hdmf-common.nwb.language -default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml index 98c00e4..c7a8401 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: hdmf-experimental + value: core description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml index 6b54542..c14e264 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml @@ -9,7 +9,7 @@ annotations: id: hdmf-experimental.experimental version: 0.1.0 imports: -- ../../hdmf_common/v1_4_0/namespace +- ../../hdmf_common/v1_5_0/namespace - hdmf-experimental.nwb.language default_prefix: hdmf-experimental.experimental/ classes: diff --git 
a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml index a884e44..bab9ede 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: hdmf-experimental + value: core description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml index 89ffc2c..17a7d9d 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml @@ -9,7 +9,7 @@ annotations: id: hdmf-experimental.resources version: 0.1.0 imports: -- ../../hdmf_common/v1_4_0/namespace +- ../../hdmf_common/v1_5_0/namespace - hdmf-experimental.nwb.language default_prefix: hdmf-experimental.resources/ classes: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml deleted file mode 100644 index daf947b..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml +++ /dev/null @@ -1,31 +0,0 @@ -name: hdmf-experimental.experimental -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-experimental -id: hdmf-experimental.experimental -version: 0.2.0 -imports: -- ../../hdmf_common/v1_5_1/namespace -- hdmf-experimental.nwb.language -default_prefix: hdmf-experimental.experimental/ -classes: - EnumData: - name: EnumData - description: Data that come from a fixed set of values. A data value of i corresponds - to the i-th value in the VectorData referenced by the 'elements' attribute. 
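The `EnumData` lookup described here (a data value of i corresponds to the i-th value in the referenced `elements` `VectorData`) amounts to fancy indexing. A minimal sketch in plain numpy, independent of the generated class and with hypothetical values:

```python
import numpy as np

elements = np.array(["left", "right"])  # the referenced 'elements' VectorData
values = np.array([0, 1, 1, 0, 0])      # EnumData: indices into elements

decoded = elements[values]              # resolve each index to its label
assert decoded.tolist() == ["left", "right", "right", "left", "left"]
```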
- is_a: VectorData - attributes: - name: - name: name - range: string - required: true - elements: - name: elements - description: Reference to the VectorData object that contains the enumerable - elements - range: VectorData - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml deleted file mode 100644 index a884e44..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml +++ /dev/null @@ -1,109 +0,0 @@ -name: hdmf-experimental.nwb.language -annotations: - is_namespace: - tag: is_namespace - value: 'False' - namespace: - tag: namespace - value: hdmf-experimental -description: Adapter objects to mimic the behavior of elements in the nwb-schema-language -id: nwb.language -imports: -- linkml:types -prefixes: - linkml: - prefix_prefix: linkml - prefix_reference: https://w3id.org/linkml -default_prefix: nwb.language/ -types: - float32: - name: float32 - typeof: float - repr: np.float32 - float64: - name: float64 - typeof: double - repr: np.float64 - long: - name: long - typeof: integer - repr: np.longlong - int64: - name: int64 - typeof: integer - repr: np.int64 - int: - name: int - typeof: integer - int32: - name: int32 - typeof: integer - repr: np.int32 - int16: - name: int16 - typeof: integer - repr: np.int16 - short: - name: short - typeof: integer - repr: np.int16 - int8: - name: int8 - typeof: integer - repr: np.int8 - uint: - name: uint - typeof: integer - repr: np.uint64 - minimum_value: 0 - uint32: - name: uint32 - typeof: integer - repr: np.uint32 - minimum_value: 0 - uint16: - name: uint16 - typeof: integer - repr: np.uint16 - minimum_value: 0 - uint8: - name: uint8 - typeof: integer - repr: np.uint8 - minimum_value: 0 - uint64: - name: uint64 - typeof: integer - repr: np.uint64 - minimum_value: 0 - numeric: - name: numeric - typeof: float - repr: np.number - text: - name: text - typeof: string - utf: - name: utf - typeof: string - utf8: - name: utf8 - typeof: string - utf_8: - name: utf_8 - typeof: string - ascii: - name: ascii - typeof: string - bool: - name: bool - typeof: boolean - isodatetime: - name: isodatetime - typeof: datetime - repr: np.datetime64 -classes: - AnyType: - name: AnyType - description: Needed because some classes in hdmf-common are datasets without dtype - class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml deleted file mode 100644 index c2fc8d8..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml +++ /dev/null @@ -1,196 +0,0 @@ -name: hdmf-experimental.resources -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-experimental -id: hdmf-experimental.resources -version: 0.2.0 -imports: -- ../../hdmf_common/v1_5_1/namespace -- hdmf-experimental.nwb.language -default_prefix: hdmf-experimental.resources/ -classes: - ExternalResources: - name: ExternalResources - description: 'A set of four tables for tracking external resource references in - a file. NOTE: this data type is in beta testing and is subject to change in - a later version.' 
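The sub-tables defined just below link up by row index: `object_keys` joins rows of the `objects` table to rows of the `keys` table, and `entities` maps keys (via `keys_idx`) to external identifiers. A toy sketch of that join, using plain tuples in place of the generated classes; all values are hypothetical:

```python
# One key ("human") used by one object, resolved to one entity.
keys = ["human"]                         # keys.key
entities = [(0, 0, "NCBI_TAXON:9606")]   # (keys_idx, resources_idx, entity_id)
objects = ["<object uuid>"]              # objects.object_id
object_keys = [(0, 0)]                   # (objects_idx, keys_idx)

for objects_idx, keys_idx in object_keys:
    # Follow keys_idx into the entities table to collect identifiers.
    ids = [eid for kidx, _, eid in entities if kidx == keys_idx]
    print(objects[objects_idx], keys[keys_idx], ids)
    # -> <object uuid> human ['NCBI_TAXON:9606']
```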
- is_a: Container - attributes: - name: - name: name - range: string - required: true - keys: - name: keys - description: A table for storing user terms that are used to refer to external - resources. - range: ExternalResources__keys - required: true - multivalued: false - entities: - name: entities - description: A table for mapping user terms (i.e., keys) to resource entities. - range: ExternalResources__entities - required: true - multivalued: false - resources: - name: resources - description: A table for mapping user terms (i.e., keys) to resource entities. - range: ExternalResources__resources - required: true - multivalued: false - objects: - name: objects - description: A table for identifying which objects in a file contain references - to external resources. - range: ExternalResources__objects - required: true - multivalued: false - object_keys: - name: object_keys - description: A table for identifying which objects use which keys. - range: ExternalResources__object_keys - required: true - multivalued: false - tree_root: true - ExternalResources__keys: - name: ExternalResources__keys - description: A table for storing user terms that are used to refer to external - resources. - is_a: Data - attributes: - name: - name: name - ifabsent: string(keys) - range: string - required: true - equals_string: keys - key: - name: key - description: The user term that maps to one or more resources in the 'resources' - table. - range: text - required: true - multivalued: false - ExternalResources__entities: - name: ExternalResources__entities - description: A table for mapping user terms (i.e., keys) to resource entities. - is_a: Data - attributes: - name: - name: name - ifabsent: string(entities) - range: string - required: true - equals_string: entities - keys_idx: - name: keys_idx - description: The index to the key in the 'keys' table. - range: uint - required: true - multivalued: false - resources_idx: - name: resources_idx - description: The index into the 'resources' table - range: uint - required: true - multivalued: false - entity_id: - name: entity_id - description: The unique identifier entity. - range: text - required: true - multivalued: false - entity_uri: - name: entity_uri - description: The URI for the entity this reference applies to. This can be - an empty string. - range: text - required: true - multivalued: false - ExternalResources__resources: - name: ExternalResources__resources - description: A table for mapping user terms (i.e., keys) to resource entities. - is_a: Data - attributes: - name: - name: name - ifabsent: string(resources) - range: string - required: true - equals_string: resources - resource: - name: resource - description: The name of the resource. - range: text - required: true - multivalued: false - resource_uri: - name: resource_uri - description: The URI for the resource. This can be an empty string. - range: text - required: true - multivalued: false - ExternalResources__objects: - name: ExternalResources__objects - description: A table for identifying which objects in a file contain references - to external resources. - is_a: Data - attributes: - name: - name: name - ifabsent: string(objects) - range: string - required: true - equals_string: objects - object_id: - name: object_id - description: The UUID for the object. 
- range: text - required: true - multivalued: false - relative_path: - name: relative_path - description: The relative path from the container with the object_id to the - dataset or attribute with the value(s) that is associated with an external - resource. This can be an empty string if the container is a dataset which - contains the value(s) that is associated with an external resource. - range: text - required: true - multivalued: false - field: - name: field - description: The field of the compound data type using an external resource. - This is used only if the dataset or attribute is a compound data type; otherwise - this should be an empty string. - range: text - required: true - multivalued: false - ExternalResources__object_keys: - name: ExternalResources__object_keys - description: A table for identifying which objects use which keys. - is_a: Data - attributes: - name: - name: name - ifabsent: string(object_keys) - range: string - required: true - equals_string: object_keys - objects_idx: - name: objects_idx - description: The index to the 'objects' table for the object that holds the - key. - range: uint - required: true - multivalued: false - keys_idx: - name: keys_idx - description: The index to the 'keys' table for the key. - range: uint - required: true - multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml deleted file mode 100644 index 6a311e0..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml +++ /dev/null @@ -1,17 +0,0 @@ -name: hdmf-experimental -annotations: - is_namespace: - tag: is_namespace - value: true - namespace: - tag: namespace - value: hdmf-experimental -description: Experimental data structures provided by HDMF. These are not guaranteed - to be available in the future. -id: hdmf-experimental -version: 0.2.0 -imports: -- hdmf-experimental.experimental -- hdmf-experimental.resources -- hdmf-experimental.nwb.language -default_prefix: hdmf-experimental/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml deleted file mode 100644 index fb98004..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml +++ /dev/null @@ -1,31 +0,0 @@ -name: hdmf-experimental.experimental -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-experimental -id: hdmf-experimental.experimental -version: 0.3.0 -imports: -- ../../hdmf_common/v1_6_0/namespace -- hdmf-experimental.nwb.language -default_prefix: hdmf-experimental.experimental/ -classes: - EnumData: - name: EnumData - description: Data that come from a fixed set of values. A data value of i corresponds - to the i-th value in the VectorData referenced by the 'elements' attribute. 
- is_a: VectorData - attributes: - name: - name: name - range: string - required: true - elements: - name: elements - description: Reference to the VectorData object that contains the enumerable - elements - range: VectorData - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml deleted file mode 100644 index a884e44..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml +++ /dev/null @@ -1,109 +0,0 @@ -name: hdmf-experimental.nwb.language -annotations: - is_namespace: - tag: is_namespace - value: 'False' - namespace: - tag: namespace - value: hdmf-experimental -description: Adapter objects to mimic the behavior of elements in the nwb-schema-language -id: nwb.language -imports: -- linkml:types -prefixes: - linkml: - prefix_prefix: linkml - prefix_reference: https://w3id.org/linkml -default_prefix: nwb.language/ -types: - float32: - name: float32 - typeof: float - repr: np.float32 - float64: - name: float64 - typeof: double - repr: np.float64 - long: - name: long - typeof: integer - repr: np.longlong - int64: - name: int64 - typeof: integer - repr: np.int64 - int: - name: int - typeof: integer - int32: - name: int32 - typeof: integer - repr: np.int32 - int16: - name: int16 - typeof: integer - repr: np.int16 - short: - name: short - typeof: integer - repr: np.int16 - int8: - name: int8 - typeof: integer - repr: np.int8 - uint: - name: uint - typeof: integer - repr: np.uint64 - minimum_value: 0 - uint32: - name: uint32 - typeof: integer - repr: np.uint32 - minimum_value: 0 - uint16: - name: uint16 - typeof: integer - repr: np.uint16 - minimum_value: 0 - uint8: - name: uint8 - typeof: integer - repr: np.uint8 - minimum_value: 0 - uint64: - name: uint64 - typeof: integer - repr: np.uint64 - minimum_value: 0 - numeric: - name: numeric - typeof: float - repr: np.number - text: - name: text - typeof: string - utf: - name: utf - typeof: string - utf8: - name: utf8 - typeof: string - utf_8: - name: utf_8 - typeof: string - ascii: - name: ascii - typeof: string - bool: - name: bool - typeof: boolean - isodatetime: - name: isodatetime - typeof: datetime - repr: np.datetime64 -classes: - AnyType: - name: AnyType - description: Needed because some classes in hdmf-common are datasets without dtype - class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml deleted file mode 100644 index 350ef24..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml +++ /dev/null @@ -1,199 +0,0 @@ -name: hdmf-experimental.resources -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-experimental -id: hdmf-experimental.resources -version: 0.3.0 -imports: -- ../../hdmf_common/v1_6_0/namespace -- hdmf-experimental.nwb.language -default_prefix: hdmf-experimental.resources/ -classes: - ExternalResources: - name: ExternalResources - description: 'A set of five tables for tracking external resource references in - a file. NOTE: this data type is experimental and is subject to change in a later - version.' 
- is_a: Container - attributes: - name: - name: name - range: string - required: true - keys: - name: keys - description: A table for storing user terms that are used to refer to external - resources. - range: ExternalResources__keys - required: true - multivalued: false - files: - name: files - description: A table for storing object ids of files used in external resources. - range: ExternalResources__files - required: true - multivalued: false - entities: - name: entities - description: A table for mapping user terms (i.e., keys) to resource entities. - range: ExternalResources__entities - required: true - multivalued: false - objects: - name: objects - description: A table for identifying which objects in a file contain references - to external resources. - range: ExternalResources__objects - required: true - multivalued: false - object_keys: - name: object_keys - description: A table for identifying which objects use which keys. - range: ExternalResources__object_keys - required: true - multivalued: false - tree_root: true - ExternalResources__keys: - name: ExternalResources__keys - description: A table for storing user terms that are used to refer to external - resources. - is_a: Data - attributes: - name: - name: name - ifabsent: string(keys) - range: string - required: true - equals_string: keys - key: - name: key - description: The user term that maps to one or more resources in the `resources` - table, e.g., "human". - range: text - required: true - multivalued: false - ExternalResources__files: - name: ExternalResources__files - description: A table for storing object ids of files used in external resources. - is_a: Data - attributes: - name: - name: name - ifabsent: string(files) - range: string - required: true - equals_string: files - file_object_id: - name: file_object_id - description: The object id (UUID) of a file that contains objects that refer - to external resources. - range: text - required: true - multivalued: false - ExternalResources__entities: - name: ExternalResources__entities - description: A table for mapping user terms (i.e., keys) to resource entities. - is_a: Data - attributes: - name: - name: name - ifabsent: string(entities) - range: string - required: true - equals_string: entities - keys_idx: - name: keys_idx - description: The row index to the key in the `keys` table. - range: uint - required: true - multivalued: false - entity_id: - name: entity_id - description: The compact uniform resource identifier (CURIE) of the entity, - in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'. - range: text - required: true - multivalued: false - entity_uri: - name: entity_uri - description: The URI for the entity this reference applies to. This can be - an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606 - range: text - required: true - multivalued: false - ExternalResources__objects: - name: ExternalResources__objects - description: A table for identifying which objects in a file contain references - to external resources. - is_a: Data - attributes: - name: - name: name - ifabsent: string(objects) - range: string - required: true - equals_string: objects - files_idx: - name: files_idx - description: The row index to the file in the `files` table containing the - object. - range: uint - required: true - multivalued: false - object_id: - name: object_id - description: The object id (UUID) of the object.
- range: text - required: true - multivalued: false - object_type: - name: object_type - description: The data type of the object. - range: text - required: true - multivalued: false - relative_path: - name: relative_path - description: The relative path from the data object with the `object_id` to - the dataset or attribute with the value(s) that is associated with an external - resource. This can be an empty string if the object is a dataset that contains - the value(s) that is associated with an external resource. - range: text - required: true - multivalued: false - field: - name: field - description: The field within the compound data type using an external resource. - This is used only if the dataset or attribute is a compound data type; otherwise - this should be an empty string. - range: text - required: true - multivalued: false - ExternalResources__object_keys: - name: ExternalResources__object_keys - description: A table for identifying which objects use which keys. - is_a: Data - attributes: - name: - name: name - ifabsent: string(object_keys) - range: string - required: true - equals_string: object_keys - objects_idx: - name: objects_idx - description: The row index to the object in the `objects` table that holds - the key - range: uint - required: true - multivalued: false - keys_idx: - name: keys_idx - description: The row index to the key in the `keys` table. - range: uint - required: true - multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml deleted file mode 100644 index fe62e64..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml +++ /dev/null @@ -1,17 +0,0 @@ -name: hdmf-experimental -annotations: - is_namespace: - tag: is_namespace - value: true - namespace: - tag: namespace - value: hdmf-experimental -description: Experimental data structures provided by HDMF. These are not guaranteed - to be available in the future. -id: hdmf-experimental -version: 0.3.0 -imports: -- hdmf-experimental.experimental -- hdmf-experimental.resources -- hdmf-experimental.nwb.language -default_prefix: hdmf-experimental/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml deleted file mode 100644 index 31c2867..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml +++ /dev/null @@ -1,31 +0,0 @@ -name: hdmf-experimental.experimental -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-experimental -id: hdmf-experimental.experimental -version: 0.4.0 -imports: -- ../../hdmf_common/v1_7_0/namespace -- hdmf-experimental.nwb.language -default_prefix: hdmf-experimental.experimental/ -classes: - EnumData: - name: EnumData - description: Data that come from a fixed set of values. A data value of i corresponds - to the i-th value in the VectorData referenced by the 'elements' attribute. 
- is_a: VectorData - attributes: - name: - name: name - range: string - required: true - elements: - name: elements - description: Reference to the VectorData object that contains the enumerable - elements - range: VectorData - tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml deleted file mode 100644 index a884e44..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml +++ /dev/null @@ -1,109 +0,0 @@ -name: hdmf-experimental.nwb.language -annotations: - is_namespace: - tag: is_namespace - value: 'False' - namespace: - tag: namespace - value: hdmf-experimental -description: Adapter objects to mimic the behavior of elements in the nwb-schema-language -id: nwb.language -imports: -- linkml:types -prefixes: - linkml: - prefix_prefix: linkml - prefix_reference: https://w3id.org/linkml -default_prefix: nwb.language/ -types: - float32: - name: float32 - typeof: float - repr: np.float32 - float64: - name: float64 - typeof: double - repr: np.float64 - long: - name: long - typeof: integer - repr: np.longlong - int64: - name: int64 - typeof: integer - repr: np.int64 - int: - name: int - typeof: integer - int32: - name: int32 - typeof: integer - repr: np.int32 - int16: - name: int16 - typeof: integer - repr: np.int16 - short: - name: short - typeof: integer - repr: np.int16 - int8: - name: int8 - typeof: integer - repr: np.int8 - uint: - name: uint - typeof: integer - repr: np.uint64 - minimum_value: 0 - uint32: - name: uint32 - typeof: integer - repr: np.uint32 - minimum_value: 0 - uint16: - name: uint16 - typeof: integer - repr: np.uint16 - minimum_value: 0 - uint8: - name: uint8 - typeof: integer - repr: np.uint8 - minimum_value: 0 - uint64: - name: uint64 - typeof: integer - repr: np.uint64 - minimum_value: 0 - numeric: - name: numeric - typeof: float - repr: np.number - text: - name: text - typeof: string - utf: - name: utf - typeof: string - utf8: - name: utf8 - typeof: string - utf_8: - name: utf_8 - typeof: string - ascii: - name: ascii - typeof: string - bool: - name: bool - typeof: boolean - isodatetime: - name: isodatetime - typeof: datetime - repr: np.datetime64 -classes: - AnyType: - name: AnyType - description: Needed because some classes in hdmf-common are datasets without dtype - class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml deleted file mode 100644 index 8768e73..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml +++ /dev/null @@ -1,222 +0,0 @@ -name: hdmf-experimental.resources -annotations: - is_namespace: - tag: is_namespace - value: false - namespace: - tag: namespace - value: hdmf-experimental -id: hdmf-experimental.resources -version: 0.4.0 -imports: -- ../../hdmf_common/v1_7_0/namespace -- hdmf-experimental.nwb.language -default_prefix: hdmf-experimental.resources/ -classes: - ExternalResources: - name: ExternalResources - description: 'A set of five tables for tracking external resource references in - a file. NOTE: this data type is experimental and is subject to change in a later - version.' 
- is_a: Container - attributes: - name: - name: name - range: string - required: true - keys: - name: keys - description: A table for storing user terms that are used to refer to external - resources. - range: ExternalResources__keys - required: true - multivalued: false - files: - name: files - description: A table for storing object ids of files used in external resources. - range: ExternalResources__files - required: true - multivalued: false - entities: - name: entities - description: A table for mapping user terms (i.e., keys) to resource entities. - range: ExternalResources__entities - required: true - multivalued: false - objects: - name: objects - description: A table for identifying which objects in a file contain references - to external resources. - range: ExternalResources__objects - required: true - multivalued: false - object_keys: - name: object_keys - description: A table for identifying which objects use which keys. - range: ExternalResources__object_keys - required: true - multivalued: false - entity_keys: - name: entity_keys - description: A table for identifying which keys use which entity. - range: ExternalResources__entity_keys - required: true - multivalued: false - tree_root: true - ExternalResources__keys: - name: ExternalResources__keys - description: A table for storing user terms that are used to refer to external - resources. - is_a: Data - attributes: - name: - name: name - ifabsent: string(keys) - range: string - required: true - equals_string: keys - key: - name: key - description: The user term that maps to one or more resources in the `resources` - table, e.g., "human". - range: text - required: true - multivalued: false - ExternalResources__files: - name: ExternalResources__files - description: A table for storing object ids of files used in external resources. - is_a: Data - attributes: - name: - name: name - ifabsent: string(files) - range: string - required: true - equals_string: files - file_object_id: - name: file_object_id - description: The object id (UUID) of a file that contains objects that refers - to external resources. - range: text - required: true - multivalued: false - ExternalResources__entities: - name: ExternalResources__entities - description: A table for mapping user terms (i.e., keys) to resource entities. - is_a: Data - attributes: - name: - name: name - ifabsent: string(entities) - range: string - required: true - equals_string: entities - entity_id: - name: entity_id - description: The compact uniform resource identifier (CURIE) of the entity, - in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'. - range: text - required: true - multivalued: false - entity_uri: - name: entity_uri - description: The URI for the entity this reference applies to. This can be - an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606 - range: text - required: true - multivalued: false - ExternalResources__objects: - name: ExternalResources__objects - description: A table for identifying which objects in a file contain references - to external resources. - is_a: Data - attributes: - name: - name: name - ifabsent: string(objects) - range: string - required: true - equals_string: objects - files_idx: - name: files_idx - description: The row index to the file in the `files` table containing the - object. - range: uint - required: true - multivalued: false - object_id: - name: object_id - description: The object id (UUID) of the object. 
- range: text - required: true - multivalued: false - object_type: - name: object_type - description: The data type of the object. - range: text - required: true - multivalued: false - relative_path: - name: relative_path - description: The relative path from the data object with the `object_id` to - the dataset or attribute with the value(s) that is associated with an external - resource. This can be an empty string if the object is a dataset that contains - the value(s) that is associated with an external resource. - range: text - required: true - multivalued: false - field: - name: field - description: The field within the compound data type using an external resource. - This is used only if the dataset or attribute is a compound data type; otherwise - this should be an empty string. - range: text - required: true - multivalued: false - ExternalResources__object_keys: - name: ExternalResources__object_keys - description: A table for identifying which objects use which keys. - is_a: Data - attributes: - name: - name: name - ifabsent: string(object_keys) - range: string - required: true - equals_string: object_keys - objects_idx: - name: objects_idx - description: The row index to the object in the `objects` table that holds - the key - range: uint - required: true - multivalued: false - keys_idx: - name: keys_idx - description: The row index to the key in the `keys` table. - range: uint - required: true - multivalued: false - ExternalResources__entity_keys: - name: ExternalResources__entity_keys - description: A table for identifying which keys use which entity. - is_a: Data - attributes: - name: - name: name - ifabsent: string(entity_keys) - range: string - required: true - equals_string: entity_keys - entities_idx: - name: entities_idx - description: The row index to the entity in the `entities` table. - range: uint - required: true - multivalued: false - keys_idx: - name: keys_idx - description: The row index to the key in the `keys` table. - range: uint - required: true - multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml deleted file mode 100644 index a48814e..0000000 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml +++ /dev/null @@ -1,17 +0,0 @@ -name: hdmf-experimental -annotations: - is_namespace: - tag: is_namespace - value: true - namespace: - tag: namespace - value: hdmf-experimental -description: Experimental data structures provided by HDMF. These are not guaranteed - to be available in the future. 
-id: hdmf-experimental -version: 0.4.0 -imports: -- hdmf-experimental.experimental -- hdmf-experimental.resources -- hdmf-experimental.nwb.language -default_prefix: hdmf-experimental/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml index a884e44..bab9ede 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: hdmf-experimental + value: core description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: @@ -19,67 +19,53 @@ types: float32: name: float32 typeof: float - repr: np.float32 float64: name: float64 typeof: double - repr: np.float64 long: name: long typeof: integer - repr: np.longlong int64: name: int64 typeof: integer - repr: np.int64 int: name: int typeof: integer int32: name: int32 typeof: integer - repr: np.int32 int16: name: int16 typeof: integer - repr: np.int16 short: name: short typeof: integer - repr: np.int16 int8: name: int8 typeof: integer - repr: np.int8 uint: name: uint typeof: integer - repr: np.uint64 minimum_value: 0 uint32: name: uint32 typeof: integer - repr: np.uint32 minimum_value: 0 uint16: name: uint16 typeof: integer - repr: np.uint16 minimum_value: 0 uint8: name: uint8 typeof: integer - repr: np.uint8 minimum_value: 0 uint64: name: uint64 typeof: integer - repr: np.uint64 minimum_value: 0 numeric: name: numeric typeof: float - repr: np.number text: name: text typeof: string @@ -101,7 +87,6 @@ types: isodatetime: name: isodatetime typeof: datetime - repr: np.datetime64 classes: AnyType: name: AnyType From 2fef717a62589c0175f6c770b07fe7d9556198e7 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 31 Jul 2024 01:58:31 -0700 Subject: [PATCH 07/61] fix attr access in linkml provider result --- nwb_linkml/src/nwb_linkml/providers/linkml.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nwb_linkml/src/nwb_linkml/providers/linkml.py b/nwb_linkml/src/nwb_linkml/providers/linkml.py index 4fc6233..4af2bec 100644 --- a/nwb_linkml/src/nwb_linkml/providers/linkml.py +++ b/nwb_linkml/src/nwb_linkml/providers/linkml.py @@ -285,4 +285,4 @@ class LinkMLProvider(Provider): ) ns_file = ns_repo.provide_from_git(commit=version) res = self.build_from_yaml(ns_file) - return res[namespace]["namespace"] + return res[namespace].namespace From 66369c76860fe5cb02161bbda08a48e3b5eb2868 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 31 Jul 2024 02:00:31 -0700 Subject: [PATCH 08/61] no op to update github cache? 
--- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 899d444..ad3f28c 100644 --- a/README.md +++ b/README.md @@ -9,4 +9,4 @@ Translating NWB schema language to linkml Just submitting to pypi to squat the package name -[![Coverage Status](https://coveralls.io/repos/github/p2p-ld/nwb-linkml/badge.svg)](https://coveralls.io/github/p2p-ld/nwb-linkml) \ No newline at end of file +[![Coverage Status](https://coveralls.io/repos/github/p2p-ld/nwb-linkml/badge.svg)](https://coveralls.io/github/p2p-ld/nwb-linkml) \ No newline at end of file From b2d7b12a78ad0cf2da40796fa4416b8937abf298 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 31 Jul 2024 02:07:44 -0700 Subject: [PATCH 09/61] update numpydantic version and relock --- nwb_linkml/pdm.lock | 72 +++++++++++++++++++-------------------- nwb_linkml/pyproject.toml | 2 +- 2 files changed, 37 insertions(+), 37 deletions(-) diff --git a/nwb_linkml/pdm.lock b/nwb_linkml/pdm.lock index 7306eec..5ed71bf 100644 --- a/nwb_linkml/pdm.lock +++ b/nwb_linkml/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "dev", "tests"] strategy = ["cross_platform", "inherit_metadata"] lock_version = "4.4.2" -content_hash = "sha256:a4d05bde012bc589ce24125d77ba68afedea5cf98ff044b6a0fb27f44d3982f1" +content_hash = "sha256:d5877e87ce194e1cd8d62d8e935e39941e0c8f78667e0240ff64690f848d4a6c" [[package]] name = "annotated-types" @@ -839,20 +839,20 @@ files = [ [[package]] name = "linkml" version = "0.0.0" -requires_python = ">=3.8.1,<4.0.0" +requires_python = "<4.0.0,>=3.8.1" git = "https://github.com/sneakers-the-rat/linkml" ref = "nwb-linkml" -revision = "df8685eb9e99eaf9ec694db2e9cd59bab8892438" +revision = "0a6578bff4713688260f64b3076b197bd6decce9" summary = "Linked Open Data Modeling Language" groups = ["default", "dev", "tests"] dependencies = [ - "antlr4-python3-runtime<4.10,>=4.9.0", + "antlr4-python3-runtime<4.10,==4.*,>=4.9.0", "click>=7.0", "graphviz>=0.10.1", "hbreader", "isodate>=0.6.0", "jinja2>=3.1.0", - "jsonasobj2<2.dev0,>=1.0.3", + "jsonasobj2==1.*,>=1.0.0,>=1.0.3", "jsonschema[format]>=4.0.0", "linkml-dataops", "linkml-runtime==1.8.0", @@ -1078,7 +1078,7 @@ files = [ [[package]] name = "numpydantic" -version = "1.2.1" +version = "1.2.2" requires_python = "<4.0,>=3.9" summary = "Type and shape validation and serialization for numpy arrays in pydantic models" groups = ["default", "dev", "tests"] @@ -1088,8 +1088,8 @@ dependencies = [ "pydantic>=2.3.0", ] files = [ - {file = "numpydantic-1.2.1-py3-none-any.whl", hash = "sha256:e21d7e272410b3a2013d2a6aeec2ed6efd13ea171b0200e2029d7c2f1453def0"}, - {file = "numpydantic-1.2.1.tar.gz", hash = "sha256:d8a3e7371d78b99fa4a4733a5b873046f064993431ae63f97edcf9bda4dd5c7f"}, + {file = "numpydantic-1.2.2-py3-none-any.whl", hash = "sha256:05481d7dbb202b6e31a97d77fa9d650b57766f996981076844572c7ddeff1a0b"}, + {file = "numpydantic-1.2.2.tar.gz", hash = "sha256:4e81c1d162b7a8bbec52c69011e745488bd452b017375d7df8f1de8ce96705a1"}, ] [[package]] @@ -1369,7 +1369,7 @@ files = [ [[package]] name = "pydantic-settings" -version = "2.3.4" +version = "2.4.0" requires_python = ">=3.8" summary = "Settings management using Pydantic" groups = ["default", "dev", "tests"] @@ -1378,8 +1378,8 @@ dependencies = [ "python-dotenv>=0.21.0", ] files = [ - {file = "pydantic_settings-2.3.4-py3-none-any.whl", hash = "sha256:11ad8bacb68a045f00e4f862c7a718c8a9ec766aa8fd4c32e39a0594b207b53a"}, - {file = "pydantic_settings-2.3.4.tar.gz", hash = 
"sha256:c5802e3d62b78e82522319bbc9b8f8ffb28ad1c988a99311d04f2a6051fca0a7"}, + {file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"}, + {file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"}, ] [[package]] @@ -1901,40 +1901,40 @@ files = [ [[package]] name = "ruff" -version = "0.5.4" +version = "0.5.5" requires_python = ">=3.7" summary = "An extremely fast Python linter and code formatter, written in Rust." groups = ["dev"] files = [ - {file = "ruff-0.5.4-py3-none-linux_armv6l.whl", hash = "sha256:82acef724fc639699b4d3177ed5cc14c2a5aacd92edd578a9e846d5b5ec18ddf"}, - {file = "ruff-0.5.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:da62e87637c8838b325e65beee485f71eb36202ce8e3cdbc24b9fcb8b99a37be"}, - {file = "ruff-0.5.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e98ad088edfe2f3b85a925ee96da652028f093d6b9b56b76fc242d8abb8e2059"}, - {file = "ruff-0.5.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c55efbecc3152d614cfe6c2247a3054cfe358cefbf794f8c79c8575456efe19"}, - {file = "ruff-0.5.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9b85eaa1f653abd0a70603b8b7008d9e00c9fa1bbd0bf40dad3f0c0bdd06793"}, - {file = "ruff-0.5.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0cf497a47751be8c883059c4613ba2f50dd06ec672692de2811f039432875278"}, - {file = "ruff-0.5.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:09c14ed6a72af9ccc8d2e313d7acf7037f0faff43cde4b507e66f14e812e37f7"}, - {file = "ruff-0.5.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:628f6b8f97b8bad2490240aa84f3e68f390e13fabc9af5c0d3b96b485921cd60"}, - {file = "ruff-0.5.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3520a00c0563d7a7a7c324ad7e2cde2355733dafa9592c671fb2e9e3cd8194c1"}, - {file = "ruff-0.5.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93789f14ca2244fb91ed481456f6d0bb8af1f75a330e133b67d08f06ad85b516"}, - {file = "ruff-0.5.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:029454e2824eafa25b9df46882f7f7844d36fd8ce51c1b7f6d97e2615a57bbcc"}, - {file = "ruff-0.5.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9492320eed573a13a0bc09a2957f17aa733fff9ce5bf00e66e6d4a88ec33813f"}, - {file = "ruff-0.5.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a6e1f62a92c645e2919b65c02e79d1f61e78a58eddaebca6c23659e7c7cb4ac7"}, - {file = "ruff-0.5.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:768fa9208df2bec4b2ce61dbc7c2ddd6b1be9fb48f1f8d3b78b3332c7d71c1ff"}, - {file = "ruff-0.5.4-py3-none-win32.whl", hash = "sha256:e1e7393e9c56128e870b233c82ceb42164966f25b30f68acbb24ed69ce9c3a4e"}, - {file = "ruff-0.5.4-py3-none-win_amd64.whl", hash = "sha256:58b54459221fd3f661a7329f177f091eb35cf7a603f01d9eb3eb11cc348d38c4"}, - {file = "ruff-0.5.4-py3-none-win_arm64.whl", hash = "sha256:bd53da65f1085fb5b307c38fd3c0829e76acf7b2a912d8d79cadcdb4875c1eb7"}, - {file = "ruff-0.5.4.tar.gz", hash = "sha256:2795726d5f71c4f4e70653273d1c23a8182f07dd8e48c12de5d867bfb7557eed"}, + {file = "ruff-0.5.5-py3-none-linux_armv6l.whl", hash = "sha256:605d589ec35d1da9213a9d4d7e7a9c761d90bba78fc8790d1c5e65026c1b9eaf"}, + {file = "ruff-0.5.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00817603822a3e42b80f7c3298c8269e09f889ee94640cd1fc7f9329788d7bf8"}, + {file = "ruff-0.5.5-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:187a60f555e9f865a2ff2c6984b9afeffa7158ba6e1eab56cb830404c942b0f3"}, + {file = "ruff-0.5.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe26fc46fa8c6e0ae3f47ddccfbb136253c831c3289bba044befe68f467bfb16"}, + {file = "ruff-0.5.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad25dd9c5faac95c8e9efb13e15803cd8bbf7f4600645a60ffe17c73f60779b"}, + {file = "ruff-0.5.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f70737c157d7edf749bcb952d13854e8f745cec695a01bdc6e29c29c288fc36e"}, + {file = "ruff-0.5.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:cfd7de17cef6ab559e9f5ab859f0d3296393bc78f69030967ca4d87a541b97a0"}, + {file = "ruff-0.5.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a09b43e02f76ac0145f86a08e045e2ea452066f7ba064fd6b0cdccb486f7c3e7"}, + {file = "ruff-0.5.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0b856cb19c60cd40198be5d8d4b556228e3dcd545b4f423d1ad812bfdca5884"}, + {file = "ruff-0.5.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3687d002f911e8a5faf977e619a034d159a8373514a587249cc00f211c67a091"}, + {file = "ruff-0.5.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ac9dc814e510436e30d0ba535f435a7f3dc97f895f844f5b3f347ec8c228a523"}, + {file = "ruff-0.5.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:af9bdf6c389b5add40d89b201425b531e0a5cceb3cfdcc69f04d3d531c6be74f"}, + {file = "ruff-0.5.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d40a8533ed545390ef8315b8e25c4bb85739b90bd0f3fe1280a29ae364cc55d8"}, + {file = "ruff-0.5.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cab904683bf9e2ecbbe9ff235bfe056f0eba754d0168ad5407832928d579e7ab"}, + {file = "ruff-0.5.5-py3-none-win32.whl", hash = "sha256:696f18463b47a94575db635ebb4c178188645636f05e934fdf361b74edf1bb2d"}, + {file = "ruff-0.5.5-py3-none-win_amd64.whl", hash = "sha256:50f36d77f52d4c9c2f1361ccbfbd09099a1b2ea5d2b2222c586ab08885cf3445"}, + {file = "ruff-0.5.5-py3-none-win_arm64.whl", hash = "sha256:3191317d967af701f1b73a31ed5788795936e423b7acce82a2b63e26eb3e89d6"}, + {file = "ruff-0.5.5.tar.gz", hash = "sha256:cc5516bdb4858d972fbc31d246bdb390eab8df1a26e2353be2dbc0c2d7f5421a"}, ] [[package]] name = "setuptools" -version = "71.1.0" +version = "72.1.0" requires_python = ">=3.8" summary = "Easily download, build, install, upgrade, and uninstall Python packages" groups = ["dev", "tests"] files = [ - {file = "setuptools-71.1.0-py3-none-any.whl", hash = "sha256:33874fdc59b3188304b2e7c80d9029097ea31627180896fb549c578ceb8a0855"}, - {file = "setuptools-71.1.0.tar.gz", hash = "sha256:032d42ee9fb536e33087fb66cac5f840eb9391ed05637b3f2a76a7c8fb477936"}, + {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, + {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, ] [[package]] @@ -2053,13 +2053,13 @@ files = [ [[package]] name = "tenacity" -version = "8.5.0" +version = "9.0.0" requires_python = ">=3.8" summary = "Retry code until it succeeds" groups = ["dev", "tests"] files = [ - {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, - {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, + {file = "tenacity-9.0.0-py3-none-any.whl", hash = 
"sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, + {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, ] [[package]] diff --git a/nwb_linkml/pyproject.toml b/nwb_linkml/pyproject.toml index f1b7268..b90d92c 100644 --- a/nwb_linkml/pyproject.toml +++ b/nwb_linkml/pyproject.toml @@ -22,7 +22,7 @@ dependencies = [ "dask>=2023.9.2", "tqdm>=4.66.1", 'typing-extensions>=4.12.2;python_version<"3.11"', - "numpydantic>=1.2.1", + "numpydantic>=1.2.2", "black>=24.4.2", "pandas>=2.2.2", ] From 9bfee3548e718fc116927712d9ad02cc76eec2da Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 5 Aug 2024 14:09:50 -0700 Subject: [PATCH 10/61] make names less lengthy by only contatenating one layer of nesting with __ --- .github/workflows/tests.yml | 3 +++ nwb_linkml/src/nwb_linkml/adapters/classes.py | 22 +++++++++++++++++-- 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 1b3002d..12ab684 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -2,6 +2,9 @@ name: Tests on: push: + pull_request: + branches: + - main jobs: test: diff --git a/nwb_linkml/src/nwb_linkml/adapters/classes.py b/nwb_linkml/src/nwb_linkml/adapters/classes.py index c700d53..fcdf43c 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/classes.py +++ b/nwb_linkml/src/nwb_linkml/adapters/classes.py @@ -133,7 +133,24 @@ class ClassAdapter(Adapter): """The full name of the object in the generated linkml Distinct from 'name' which is the thing that's used to define position in - a hierarchical data setting + a hierarchical data setting. + + Combines names from ``parent``, if present, using ``"__"`` . + Rather than concatenating the full series of names with ``__`` like + + * ``Parent`` + * ``Parent__child1`` + * ``Parent__child1__child2`` + + we only keep the last parent, so + + * ``Parent`` + * ``Parent__child1`` + * ``child1__child2`` + + The assumption is that a child name may not be unique, but the combination of + a parent/child pair should be unique enough to avoid name shadowing without + making humongous and cumbersome names. 
""" if self.cls.neurodata_type_def: name = self.cls.neurodata_type_def @@ -141,7 +158,8 @@ class ClassAdapter(Adapter): # not necessarily a unique name, so we combine parent names name_parts = [] if self.parent is not None: - name_parts.append(self.parent._get_full_name()) + parent_name = self.parent._get_full_name().split("__")[-1] + name_parts.append(parent_name) name_parts.append(self.cls.name) name = "__".join(name_parts) From e72c860edd697ef8de1ffb96e067693018cead21 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 5 Aug 2024 14:19:03 -0700 Subject: [PATCH 11/61] update models with shorter names --- .../pydantic/core/v2_2_0/core_nwb_file.py | 24 +++++++++---------- .../models/pydantic/core/v2_2_0/namespace.py | 8 +++---- .../pydantic/core/v2_2_1/core_nwb_file.py | 24 +++++++++---------- .../models/pydantic/core/v2_2_1/namespace.py | 8 +++---- .../pydantic/core/v2_2_2/core_nwb_file.py | 24 +++++++++---------- .../models/pydantic/core/v2_2_2/namespace.py | 8 +++---- .../pydantic/core/v2_2_4/core_nwb_file.py | 24 +++++++++---------- .../models/pydantic/core/v2_2_4/namespace.py | 8 +++---- .../pydantic/core/v2_2_5/core_nwb_file.py | 24 +++++++++---------- .../models/pydantic/core/v2_2_5/namespace.py | 8 +++---- .../pydantic/core/v2_3_0/core_nwb_file.py | 24 +++++++++---------- .../models/pydantic/core/v2_3_0/namespace.py | 8 +++---- .../pydantic/core/v2_4_0/core_nwb_file.py | 24 +++++++++---------- .../models/pydantic/core/v2_4_0/namespace.py | 8 +++---- .../pydantic/core/v2_5_0/core_nwb_file.py | 24 +++++++++---------- .../models/pydantic/core/v2_5_0/namespace.py | 8 +++---- .../core/v2_6_0_alpha/core_nwb_file.py | 24 +++++++++---------- .../pydantic/core/v2_6_0_alpha/namespace.py | 8 +++---- .../pydantic/core/v2_7_0/core_nwb_file.py | 24 +++++++++---------- .../models/pydantic/core/v2_7_0/namespace.py | 8 +++---- .../linkml/core/v2_2_0/core.nwb.file.yaml | 24 +++++++++---------- .../linkml/core/v2_2_1/core.nwb.file.yaml | 24 +++++++++---------- .../linkml/core/v2_2_2/core.nwb.file.yaml | 24 +++++++++---------- .../linkml/core/v2_2_4/core.nwb.file.yaml | 24 +++++++++---------- .../linkml/core/v2_2_5/core.nwb.file.yaml | 24 +++++++++---------- .../linkml/core/v2_3_0/core.nwb.file.yaml | 24 +++++++++---------- .../linkml/core/v2_4_0/core.nwb.file.yaml | 24 +++++++++---------- .../linkml/core/v2_5_0/core.nwb.file.yaml | 24 +++++++++---------- .../core/v2_6_0_alpha/core.nwb.file.yaml | 24 +++++++++---------- .../linkml/core/v2_7_0/core.nwb.file.yaml | 24 +++++++++---------- 30 files changed, 280 insertions(+), 280 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py index 397abf7..8acf5f9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py @@ -256,7 +256,7 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", ) - source_script: Optional[NWBFileGeneralSourceScript] = Field( + source_script: Optional[GeneralSourceScript] = Field( None, description="""Script file or link to public source code used to create this NWB file.""", ) @@ -284,10 +284,10 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Information about the animal or person from which the data was measured.""", ) - extracellular_ephys: 
Optional[NWBFileGeneralExtracellularEphys] = Field( + extracellular_ephys: Optional[GeneralExtracellularEphys] = Field( None, description="""Metadata related to extracellular electrophysiology.""" ) - intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field( + intracellular_ephys: Optional[GeneralIntracellularEphys] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) optogenetics: Optional[List[OptogeneticStimulusSite]] = Field( @@ -302,7 +302,7 @@ class NWBFileGeneral(ConfiguredBaseModel): ) -class NWBFileGeneralSourceScript(ConfiguredBaseModel): +class GeneralSourceScript(ConfiguredBaseModel): """ Script file or link to public source code used to create this NWB file. """ @@ -357,7 +357,7 @@ class Subject(NWBContainer): ) -class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): +class GeneralExtracellularEphys(ConfiguredBaseModel): """ Metadata related to extracellular electrophysiology. """ @@ -376,12 +376,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Physical group of electrodes.""" ) - electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field( + electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) -class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): +class ExtracellularEphysElectrodes(DynamicTable): """ A table of all electrodes (i.e. channels) used for recording. """ @@ -516,7 +516,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): ) -class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): +class GeneralIntracellularEphys(ConfiguredBaseModel): """ Metadata related to intracellular electrophysiology. 
""" @@ -549,8 +549,8 @@ class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): NWBFile.model_rebuild() NWBFileStimulus.model_rebuild() NWBFileGeneral.model_rebuild() -NWBFileGeneralSourceScript.model_rebuild() +GeneralSourceScript.model_rebuild() Subject.model_rebuild() -NWBFileGeneralExtracellularEphys.model_rebuild() -NWBFileGeneralExtracellularEphysElectrodes.model_rebuild() -NWBFileGeneralIntracellularEphys.model_rebuild() +GeneralExtracellularEphys.model_rebuild() +ExtracellularEphysElectrodes.model_rebuild() +GeneralIntracellularEphys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py index e2d169d..6ee8d4e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py @@ -128,11 +128,11 @@ from ...core.v2_2_0.core_nwb_file import ( NWBFile, NWBFileStimulus, NWBFileGeneral, - NWBFileGeneralSourceScript, + GeneralSourceScript, Subject, - NWBFileGeneralExtracellularEphys, - NWBFileGeneralExtracellularEphysElectrodes, - NWBFileGeneralIntracellularEphys, + GeneralExtracellularEphys, + ExtracellularEphysElectrodes, + GeneralIntracellularEphys, ) from ...core.v2_2_0.core_nwb_epoch import TimeIntervals, TimeIntervalsTimeseries diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py index c9f7dcd..f2eb79e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py @@ -256,7 +256,7 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", ) - source_script: Optional[NWBFileGeneralSourceScript] = Field( + source_script: Optional[GeneralSourceScript] = Field( None, description="""Script file or link to public source code used to create this NWB file.""", ) @@ -284,10 +284,10 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Information about the animal or person from which the data was measured.""", ) - extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field( + extracellular_ephys: Optional[GeneralExtracellularEphys] = Field( None, description="""Metadata related to extracellular electrophysiology.""" ) - intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field( + intracellular_ephys: Optional[GeneralIntracellularEphys] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) optogenetics: Optional[List[OptogeneticStimulusSite]] = Field( @@ -302,7 +302,7 @@ class NWBFileGeneral(ConfiguredBaseModel): ) -class NWBFileGeneralSourceScript(ConfiguredBaseModel): +class GeneralSourceScript(ConfiguredBaseModel): """ Script file or link to public source code used to create this NWB file. """ @@ -357,7 +357,7 @@ class Subject(NWBContainer): ) -class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): +class GeneralExtracellularEphys(ConfiguredBaseModel): """ Metadata related to extracellular electrophysiology. 
""" @@ -376,12 +376,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Physical group of electrodes.""" ) - electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field( + electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) -class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): +class ExtracellularEphysElectrodes(DynamicTable): """ A table of all electrodes (i.e. channels) used for recording. """ @@ -516,7 +516,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): ) -class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): +class GeneralIntracellularEphys(ConfiguredBaseModel): """ Metadata related to intracellular electrophysiology. """ @@ -549,8 +549,8 @@ class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): NWBFile.model_rebuild() NWBFileStimulus.model_rebuild() NWBFileGeneral.model_rebuild() -NWBFileGeneralSourceScript.model_rebuild() +GeneralSourceScript.model_rebuild() Subject.model_rebuild() -NWBFileGeneralExtracellularEphys.model_rebuild() -NWBFileGeneralExtracellularEphysElectrodes.model_rebuild() -NWBFileGeneralIntracellularEphys.model_rebuild() +GeneralExtracellularEphys.model_rebuild() +ExtracellularEphysElectrodes.model_rebuild() +GeneralIntracellularEphys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py index b798833..5ef5e82 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py @@ -128,11 +128,11 @@ from ...core.v2_2_1.core_nwb_file import ( NWBFile, NWBFileStimulus, NWBFileGeneral, - NWBFileGeneralSourceScript, + GeneralSourceScript, Subject, - NWBFileGeneralExtracellularEphys, - NWBFileGeneralExtracellularEphysElectrodes, - NWBFileGeneralIntracellularEphys, + GeneralExtracellularEphys, + ExtracellularEphysElectrodes, + GeneralIntracellularEphys, ) from ...core.v2_2_1.core_nwb_epoch import TimeIntervals, TimeIntervalsTimeseries diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py index fb720cb..4479d53 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py @@ -256,7 +256,7 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", ) - source_script: Optional[NWBFileGeneralSourceScript] = Field( + source_script: Optional[GeneralSourceScript] = Field( None, description="""Script file or link to public source code used to create this NWB file.""", ) @@ -284,10 +284,10 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Information about the animal or person from which the data was measured.""", ) - extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field( + extracellular_ephys: Optional[GeneralExtracellularEphys] = Field( None, description="""Metadata related to extracellular electrophysiology.""" ) - intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field( + intracellular_ephys: Optional[GeneralIntracellularEphys] = Field( None, description="""Metadata 
related to intracellular electrophysiology.""" ) optogenetics: Optional[List[OptogeneticStimulusSite]] = Field( @@ -302,7 +302,7 @@ class NWBFileGeneral(ConfiguredBaseModel): ) -class NWBFileGeneralSourceScript(ConfiguredBaseModel): +class GeneralSourceScript(ConfiguredBaseModel): """ Script file or link to public source code used to create this NWB file. """ @@ -357,7 +357,7 @@ class Subject(NWBContainer): ) -class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): +class GeneralExtracellularEphys(ConfiguredBaseModel): """ Metadata related to extracellular electrophysiology. """ @@ -376,12 +376,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Physical group of electrodes.""" ) - electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field( + electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) -class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): +class ExtracellularEphysElectrodes(DynamicTable): """ A table of all electrodes (i.e. channels) used for recording. """ @@ -516,7 +516,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): ) -class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): +class GeneralIntracellularEphys(ConfiguredBaseModel): """ Metadata related to intracellular electrophysiology. """ @@ -549,8 +549,8 @@ class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): NWBFile.model_rebuild() NWBFileStimulus.model_rebuild() NWBFileGeneral.model_rebuild() -NWBFileGeneralSourceScript.model_rebuild() +GeneralSourceScript.model_rebuild() Subject.model_rebuild() -NWBFileGeneralExtracellularEphys.model_rebuild() -NWBFileGeneralExtracellularEphysElectrodes.model_rebuild() -NWBFileGeneralIntracellularEphys.model_rebuild() +GeneralExtracellularEphys.model_rebuild() +ExtracellularEphysElectrodes.model_rebuild() +GeneralIntracellularEphys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py index 395e23e..8a011e2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py @@ -127,11 +127,11 @@ from ...core.v2_2_2.core_nwb_file import ( NWBFile, NWBFileStimulus, NWBFileGeneral, - NWBFileGeneralSourceScript, + GeneralSourceScript, Subject, - NWBFileGeneralExtracellularEphys, - NWBFileGeneralExtracellularEphysElectrodes, - NWBFileGeneralIntracellularEphys, + GeneralExtracellularEphys, + ExtracellularEphysElectrodes, + GeneralIntracellularEphys, ) from ...core.v2_2_2.core_nwb_epoch import TimeIntervals, TimeIntervalsTimeseries diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py index 6aba2d7..22f8f82 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py @@ -272,7 +272,7 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", ) - source_script: Optional[NWBFileGeneralSourceScript] = Field( + source_script: Optional[GeneralSourceScript] = Field( None, description="""Script file or link to public source code used to 
create this NWB file.""", ) @@ -300,10 +300,10 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Information about the animal or person from which the data was measured.""", ) - extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field( + extracellular_ephys: Optional[GeneralExtracellularEphys] = Field( None, description="""Metadata related to extracellular electrophysiology.""" ) - intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field( + intracellular_ephys: Optional[GeneralIntracellularEphys] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) optogenetics: Optional[List[OptogeneticStimulusSite]] = Field( @@ -318,7 +318,7 @@ class NWBFileGeneral(ConfiguredBaseModel): ) -class NWBFileGeneralSourceScript(ConfiguredBaseModel): +class GeneralSourceScript(ConfiguredBaseModel): """ Script file or link to public source code used to create this NWB file. """ @@ -335,7 +335,7 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel): value: str = Field(...) -class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): +class GeneralExtracellularEphys(ConfiguredBaseModel): """ Metadata related to extracellular electrophysiology. """ @@ -354,12 +354,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Physical group of electrodes.""" ) - electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field( + electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) -class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): +class ExtracellularEphysElectrodes(DynamicTable): """ A table of all electrodes (i.e. channels) used for recording. """ @@ -494,7 +494,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): ) -class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): +class GeneralIntracellularEphys(ConfiguredBaseModel): """ Metadata related to intracellular electrophysiology. 
""" @@ -575,9 +575,9 @@ ScratchData.model_rebuild() NWBFile.model_rebuild() NWBFileStimulus.model_rebuild() NWBFileGeneral.model_rebuild() -NWBFileGeneralSourceScript.model_rebuild() -NWBFileGeneralExtracellularEphys.model_rebuild() -NWBFileGeneralExtracellularEphysElectrodes.model_rebuild() -NWBFileGeneralIntracellularEphys.model_rebuild() +GeneralSourceScript.model_rebuild() +GeneralExtracellularEphys.model_rebuild() +ExtracellularEphysElectrodes.model_rebuild() +GeneralIntracellularEphys.model_rebuild() LabMetaData.model_rebuild() Subject.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py index fc74ca5..83cb593 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py @@ -134,10 +134,10 @@ from ...core.v2_2_4.core_nwb_file import ( NWBFile, NWBFileStimulus, NWBFileGeneral, - NWBFileGeneralSourceScript, - NWBFileGeneralExtracellularEphys, - NWBFileGeneralExtracellularEphysElectrodes, - NWBFileGeneralIntracellularEphys, + GeneralSourceScript, + GeneralExtracellularEphys, + ExtracellularEphysElectrodes, + GeneralIntracellularEphys, LabMetaData, Subject, ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py index 22730c2..9f6c0c9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py @@ -272,7 +272,7 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", ) - source_script: Optional[NWBFileGeneralSourceScript] = Field( + source_script: Optional[GeneralSourceScript] = Field( None, description="""Script file or link to public source code used to create this NWB file.""", ) @@ -300,10 +300,10 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Information about the animal or person from which the data was measured.""", ) - extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field( + extracellular_ephys: Optional[GeneralExtracellularEphys] = Field( None, description="""Metadata related to extracellular electrophysiology.""" ) - intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field( + intracellular_ephys: Optional[GeneralIntracellularEphys] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) optogenetics: Optional[List[OptogeneticStimulusSite]] = Field( @@ -318,7 +318,7 @@ class NWBFileGeneral(ConfiguredBaseModel): ) -class NWBFileGeneralSourceScript(ConfiguredBaseModel): +class GeneralSourceScript(ConfiguredBaseModel): """ Script file or link to public source code used to create this NWB file. """ @@ -335,7 +335,7 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel): value: str = Field(...) -class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): +class GeneralExtracellularEphys(ConfiguredBaseModel): """ Metadata related to extracellular electrophysiology. 
""" @@ -354,12 +354,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Physical group of electrodes.""" ) - electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field( + electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) -class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): +class ExtracellularEphysElectrodes(DynamicTable): """ A table of all electrodes (i.e. channels) used for recording. """ @@ -494,7 +494,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): ) -class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): +class GeneralIntracellularEphys(ConfiguredBaseModel): """ Metadata related to intracellular electrophysiology. """ @@ -575,9 +575,9 @@ ScratchData.model_rebuild() NWBFile.model_rebuild() NWBFileStimulus.model_rebuild() NWBFileGeneral.model_rebuild() -NWBFileGeneralSourceScript.model_rebuild() -NWBFileGeneralExtracellularEphys.model_rebuild() -NWBFileGeneralExtracellularEphysElectrodes.model_rebuild() -NWBFileGeneralIntracellularEphys.model_rebuild() +GeneralSourceScript.model_rebuild() +GeneralExtracellularEphys.model_rebuild() +ExtracellularEphysElectrodes.model_rebuild() +GeneralIntracellularEphys.model_rebuild() LabMetaData.model_rebuild() Subject.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py index fae01ff..efff00e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py @@ -134,10 +134,10 @@ from ...core.v2_2_5.core_nwb_file import ( NWBFile, NWBFileStimulus, NWBFileGeneral, - NWBFileGeneralSourceScript, - NWBFileGeneralExtracellularEphys, - NWBFileGeneralExtracellularEphysElectrodes, - NWBFileGeneralIntracellularEphys, + GeneralSourceScript, + GeneralExtracellularEphys, + ExtracellularEphysElectrodes, + GeneralIntracellularEphys, LabMetaData, Subject, ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py index cea7fa7..499296e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py @@ -272,7 +272,7 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", ) - source_script: Optional[NWBFileGeneralSourceScript] = Field( + source_script: Optional[GeneralSourceScript] = Field( None, description="""Script file or link to public source code used to create this NWB file.""", ) @@ -300,10 +300,10 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Information about the animal or person from which the data was measured.""", ) - extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field( + extracellular_ephys: Optional[GeneralExtracellularEphys] = Field( None, description="""Metadata related to extracellular electrophysiology.""" ) - intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field( + intracellular_ephys: Optional[GeneralIntracellularEphys] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) optogenetics: 
Optional[List[OptogeneticStimulusSite]] = Field( @@ -318,7 +318,7 @@ class NWBFileGeneral(ConfiguredBaseModel): ) -class NWBFileGeneralSourceScript(ConfiguredBaseModel): +class GeneralSourceScript(ConfiguredBaseModel): """ Script file or link to public source code used to create this NWB file. """ @@ -335,7 +335,7 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel): value: str = Field(...) -class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): +class GeneralExtracellularEphys(ConfiguredBaseModel): """ Metadata related to extracellular electrophysiology. """ @@ -354,12 +354,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Physical group of electrodes.""" ) - electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field( + electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) -class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): +class ExtracellularEphysElectrodes(DynamicTable): """ A table of all electrodes (i.e. channels) used for recording. """ @@ -491,7 +491,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): ) -class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): +class GeneralIntracellularEphys(ConfiguredBaseModel): """ Metadata related to intracellular electrophysiology. """ @@ -573,9 +573,9 @@ ScratchData.model_rebuild() NWBFile.model_rebuild() NWBFileStimulus.model_rebuild() NWBFileGeneral.model_rebuild() -NWBFileGeneralSourceScript.model_rebuild() -NWBFileGeneralExtracellularEphys.model_rebuild() -NWBFileGeneralExtracellularEphysElectrodes.model_rebuild() -NWBFileGeneralIntracellularEphys.model_rebuild() +GeneralSourceScript.model_rebuild() +GeneralExtracellularEphys.model_rebuild() +ExtracellularEphysElectrodes.model_rebuild() +GeneralIntracellularEphys.model_rebuild() LabMetaData.model_rebuild() Subject.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py index 807ed1c..9669627 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py @@ -137,10 +137,10 @@ from ...core.v2_3_0.core_nwb_file import ( NWBFile, NWBFileStimulus, NWBFileGeneral, - NWBFileGeneralSourceScript, - NWBFileGeneralExtracellularEphys, - NWBFileGeneralExtracellularEphysElectrodes, - NWBFileGeneralIntracellularEphys, + GeneralSourceScript, + GeneralExtracellularEphys, + ExtracellularEphysElectrodes, + GeneralIntracellularEphys, LabMetaData, Subject, ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py index 211707f..6f128d3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py @@ -280,7 +280,7 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", ) - source_script: Optional[NWBFileGeneralSourceScript] = Field( + source_script: Optional[GeneralSourceScript] = Field( None, description="""Script file or link to public source code used to create this NWB file.""", ) @@ -308,10 +308,10 @@ class 
NWBFileGeneral(ConfiguredBaseModel): None, description="""Information about the animal or person from which the data was measured.""", ) - extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field( + extracellular_ephys: Optional[GeneralExtracellularEphys] = Field( None, description="""Metadata related to extracellular electrophysiology.""" ) - intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field( + intracellular_ephys: Optional[GeneralIntracellularEphys] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) optogenetics: Optional[List[OptogeneticStimulusSite]] = Field( @@ -326,7 +326,7 @@ class NWBFileGeneral(ConfiguredBaseModel): ) -class NWBFileGeneralSourceScript(ConfiguredBaseModel): +class GeneralSourceScript(ConfiguredBaseModel): """ Script file or link to public source code used to create this NWB file. """ @@ -343,7 +343,7 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel): value: str = Field(...) -class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): +class GeneralExtracellularEphys(ConfiguredBaseModel): """ Metadata related to extracellular electrophysiology. """ @@ -362,12 +362,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Physical group of electrodes.""" ) - electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field( + electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) -class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): +class ExtracellularEphysElectrodes(DynamicTable): """ A table of all electrodes (i.e. channels) used for recording. """ @@ -499,7 +499,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): ) -class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): +class GeneralIntracellularEphys(ConfiguredBaseModel): """ Metadata related to intracellular electrophysiology. 
""" @@ -602,9 +602,9 @@ ScratchData.model_rebuild() NWBFile.model_rebuild() NWBFileStimulus.model_rebuild() NWBFileGeneral.model_rebuild() -NWBFileGeneralSourceScript.model_rebuild() -NWBFileGeneralExtracellularEphys.model_rebuild() -NWBFileGeneralExtracellularEphysElectrodes.model_rebuild() -NWBFileGeneralIntracellularEphys.model_rebuild() +GeneralSourceScript.model_rebuild() +GeneralExtracellularEphys.model_rebuild() +ExtracellularEphysElectrodes.model_rebuild() +GeneralIntracellularEphys.model_rebuild() LabMetaData.model_rebuild() Subject.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py index f4b518c..664e6a7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py @@ -150,10 +150,10 @@ from ...core.v2_4_0.core_nwb_file import ( NWBFile, NWBFileStimulus, NWBFileGeneral, - NWBFileGeneralSourceScript, - NWBFileGeneralExtracellularEphys, - NWBFileGeneralExtracellularEphysElectrodes, - NWBFileGeneralIntracellularEphys, + GeneralSourceScript, + GeneralExtracellularEphys, + ExtracellularEphysElectrodes, + GeneralIntracellularEphys, LabMetaData, Subject, ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py index f71c621..37dfb63 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py @@ -283,7 +283,7 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", ) - source_script: Optional[NWBFileGeneralSourceScript] = Field( + source_script: Optional[GeneralSourceScript] = Field( None, description="""Script file or link to public source code used to create this NWB file.""", ) @@ -311,10 +311,10 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Information about the animal or person from which the data was measured.""", ) - extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field( + extracellular_ephys: Optional[GeneralExtracellularEphys] = Field( None, description="""Metadata related to extracellular electrophysiology.""" ) - intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field( + intracellular_ephys: Optional[GeneralIntracellularEphys] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) optogenetics: Optional[List[OptogeneticStimulusSite]] = Field( @@ -329,7 +329,7 @@ class NWBFileGeneral(ConfiguredBaseModel): ) -class NWBFileGeneralSourceScript(ConfiguredBaseModel): +class GeneralSourceScript(ConfiguredBaseModel): """ Script file or link to public source code used to create this NWB file. """ @@ -346,7 +346,7 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel): value: str = Field(...) -class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): +class GeneralExtracellularEphys(ConfiguredBaseModel): """ Metadata related to extracellular electrophysiology. 
""" @@ -365,12 +365,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Physical group of electrodes.""" ) - electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field( + electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) -class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): +class ExtracellularEphysElectrodes(DynamicTable): """ A table of all electrodes (i.e. channels) used for recording. """ @@ -502,7 +502,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): ) -class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): +class GeneralIntracellularEphys(ConfiguredBaseModel): """ Metadata related to intracellular electrophysiology. """ @@ -605,9 +605,9 @@ ScratchData.model_rebuild() NWBFile.model_rebuild() NWBFileStimulus.model_rebuild() NWBFileGeneral.model_rebuild() -NWBFileGeneralSourceScript.model_rebuild() -NWBFileGeneralExtracellularEphys.model_rebuild() -NWBFileGeneralExtracellularEphysElectrodes.model_rebuild() -NWBFileGeneralIntracellularEphys.model_rebuild() +GeneralSourceScript.model_rebuild() +GeneralExtracellularEphys.model_rebuild() +ExtracellularEphysElectrodes.model_rebuild() +GeneralIntracellularEphys.model_rebuild() LabMetaData.model_rebuild() Subject.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py index 4851576..092a338 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py @@ -151,10 +151,10 @@ from ...core.v2_5_0.core_nwb_file import ( NWBFile, NWBFileStimulus, NWBFileGeneral, - NWBFileGeneralSourceScript, - NWBFileGeneralExtracellularEphys, - NWBFileGeneralExtracellularEphysElectrodes, - NWBFileGeneralIntracellularEphys, + GeneralSourceScript, + GeneralExtracellularEphys, + ExtracellularEphysElectrodes, + GeneralIntracellularEphys, LabMetaData, Subject, ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py index 453e761..ce4372c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py @@ -283,7 +283,7 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", ) - source_script: Optional[NWBFileGeneralSourceScript] = Field( + source_script: Optional[GeneralSourceScript] = Field( None, description="""Script file or link to public source code used to create this NWB file.""", ) @@ -311,10 +311,10 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Information about the animal or person from which the data was measured.""", ) - extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field( + extracellular_ephys: Optional[GeneralExtracellularEphys] = Field( None, description="""Metadata related to extracellular electrophysiology.""" ) - intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field( + intracellular_ephys: Optional[GeneralIntracellularEphys] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) 
optogenetics: Optional[List[OptogeneticStimulusSite]] = Field( @@ -329,7 +329,7 @@ class NWBFileGeneral(ConfiguredBaseModel): ) -class NWBFileGeneralSourceScript(ConfiguredBaseModel): +class GeneralSourceScript(ConfiguredBaseModel): """ Script file or link to public source code used to create this NWB file. """ @@ -346,7 +346,7 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel): value: str = Field(...) -class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): +class GeneralExtracellularEphys(ConfiguredBaseModel): """ Metadata related to extracellular electrophysiology. """ @@ -365,12 +365,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Physical group of electrodes.""" ) - electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field( + electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) -class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): +class ExtracellularEphysElectrodes(DynamicTable): """ A table of all electrodes (i.e. channels) used for recording. """ @@ -502,7 +502,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): ) -class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): +class GeneralIntracellularEphys(ConfiguredBaseModel): """ Metadata related to intracellular electrophysiology. """ @@ -623,10 +623,10 @@ ScratchData.model_rebuild() NWBFile.model_rebuild() NWBFileStimulus.model_rebuild() NWBFileGeneral.model_rebuild() -NWBFileGeneralSourceScript.model_rebuild() -NWBFileGeneralExtracellularEphys.model_rebuild() -NWBFileGeneralExtracellularEphysElectrodes.model_rebuild() -NWBFileGeneralIntracellularEphys.model_rebuild() +GeneralSourceScript.model_rebuild() +GeneralExtracellularEphys.model_rebuild() +ExtracellularEphysElectrodes.model_rebuild() +GeneralIntracellularEphys.model_rebuild() LabMetaData.model_rebuild() Subject.model_rebuild() SubjectAge.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py index c6adbf3..9a21619 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py @@ -152,10 +152,10 @@ from ...core.v2_6_0_alpha.core_nwb_file import ( NWBFile, NWBFileStimulus, NWBFileGeneral, - NWBFileGeneralSourceScript, - NWBFileGeneralExtracellularEphys, - NWBFileGeneralExtracellularEphysElectrodes, - NWBFileGeneralIntracellularEphys, + GeneralSourceScript, + GeneralExtracellularEphys, + ExtracellularEphysElectrodes, + GeneralIntracellularEphys, LabMetaData, Subject, SubjectAge, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py index 3ccc777..0e7d234 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py @@ -291,7 +291,7 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", ) - source_script: Optional[NWBFileGeneralSourceScript] = Field( + source_script: Optional[GeneralSourceScript] = Field( None, description="""Script file or link to public source code used to create 
this NWB file.""", ) @@ -319,10 +319,10 @@ class NWBFileGeneral(ConfiguredBaseModel): None, description="""Information about the animal or person from which the data was measured.""", ) - extracellular_ephys: Optional[NWBFileGeneralExtracellularEphys] = Field( + extracellular_ephys: Optional[GeneralExtracellularEphys] = Field( None, description="""Metadata related to extracellular electrophysiology.""" ) - intracellular_ephys: Optional[NWBFileGeneralIntracellularEphys] = Field( + intracellular_ephys: Optional[GeneralIntracellularEphys] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) optogenetics: Optional[List[OptogeneticStimulusSite]] = Field( @@ -337,7 +337,7 @@ class NWBFileGeneral(ConfiguredBaseModel): ) -class NWBFileGeneralSourceScript(ConfiguredBaseModel): +class GeneralSourceScript(ConfiguredBaseModel): """ Script file or link to public source code used to create this NWB file. """ @@ -354,7 +354,7 @@ class NWBFileGeneralSourceScript(ConfiguredBaseModel): value: str = Field(...) -class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): +class GeneralExtracellularEphys(ConfiguredBaseModel): """ Metadata related to extracellular electrophysiology. """ @@ -373,12 +373,12 @@ class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Physical group of electrodes.""" ) - electrodes: Optional[NWBFileGeneralExtracellularEphysElectrodes] = Field( + electrodes: Optional[ExtracellularEphysElectrodes] = Field( None, description="""A table of all electrodes (i.e. channels) used for recording.""" ) -class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): +class ExtracellularEphysElectrodes(DynamicTable): """ A table of all electrodes (i.e. channels) used for recording. """ @@ -510,7 +510,7 @@ class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): ) -class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): +class GeneralIntracellularEphys(ConfiguredBaseModel): """ Metadata related to intracellular electrophysiology. 
""" @@ -631,10 +631,10 @@ ScratchData.model_rebuild() NWBFile.model_rebuild() NWBFileStimulus.model_rebuild() NWBFileGeneral.model_rebuild() -NWBFileGeneralSourceScript.model_rebuild() -NWBFileGeneralExtracellularEphys.model_rebuild() -NWBFileGeneralExtracellularEphysElectrodes.model_rebuild() -NWBFileGeneralIntracellularEphys.model_rebuild() +GeneralSourceScript.model_rebuild() +GeneralExtracellularEphys.model_rebuild() +ExtracellularEphysElectrodes.model_rebuild() +GeneralIntracellularEphys.model_rebuild() LabMetaData.model_rebuild() Subject.model_rebuild() SubjectAge.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py index b347afb..80b7f5e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py @@ -153,10 +153,10 @@ from ...core.v2_7_0.core_nwb_file import ( NWBFile, NWBFileStimulus, NWBFileGeneral, - NWBFileGeneralSourceScript, - NWBFileGeneralExtracellularEphys, - NWBFileGeneralExtracellularEphysElectrodes, - NWBFileGeneralIntracellularEphys, + GeneralSourceScript, + GeneralExtracellularEphys, + ExtracellularEphysElectrodes, + GeneralIntracellularEphys, LabMetaData, Subject, SubjectAge, diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml index 6a76bfa..cad0645 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml @@ -359,7 +359,7 @@ classes: name: source_script description: Script file or link to public source code used to create this NWB file. - range: NWBFile__general__source_script + range: general__source_script required: false multivalued: false stimulus: @@ -408,13 +408,13 @@ classes: extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. - range: NWBFile__general__extracellular_ephys + range: general__extracellular_ephys required: false multivalued: false intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. - range: NWBFile__general__intracellular_ephys + range: general__intracellular_ephys required: false multivalued: false optogenetics: @@ -433,8 +433,8 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane - NWBFile__general__source_script: - name: NWBFile__general__source_script + general__source_script: + name: general__source_script description: Script file or link to public source code used to create this NWB file. attributes: @@ -513,8 +513,8 @@ classes: range: text required: false multivalued: false - NWBFile__general__extracellular_ephys: - name: NWBFile__general__extracellular_ephys + general__extracellular_ephys: + name: general__extracellular_ephys description: Metadata related to extracellular electrophysiology. attributes: name: @@ -532,11 +532,11 @@ classes: electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. - range: NWBFile__general__extracellular_ephys__electrodes + range: extracellular_ephys__electrodes required: false multivalued: false - NWBFile__general__extracellular_ephys__electrodes: - name: NWBFile__general__extracellular_ephys__electrodes + extracellular_ephys__electrodes: + name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. 
channels) used for recording. is_a: DynamicTable attributes: @@ -653,8 +653,8 @@ classes: range: text required: false multivalued: false - NWBFile__general__intracellular_ephys: - name: NWBFile__general__intracellular_ephys + general__intracellular_ephys: + name: general__intracellular_ephys description: Metadata related to intracellular electrophysiology. attributes: name: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml index 44f6d85..8506a84 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml @@ -359,7 +359,7 @@ classes: name: source_script description: Script file or link to public source code used to create this NWB file. - range: NWBFile__general__source_script + range: general__source_script required: false multivalued: false stimulus: @@ -408,13 +408,13 @@ classes: extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. - range: NWBFile__general__extracellular_ephys + range: general__extracellular_ephys required: false multivalued: false intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. - range: NWBFile__general__intracellular_ephys + range: general__intracellular_ephys required: false multivalued: false optogenetics: @@ -433,8 +433,8 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane - NWBFile__general__source_script: - name: NWBFile__general__source_script + general__source_script: + name: general__source_script description: Script file or link to public source code used to create this NWB file. attributes: @@ -513,8 +513,8 @@ classes: range: text required: false multivalued: false - NWBFile__general__extracellular_ephys: - name: NWBFile__general__extracellular_ephys + general__extracellular_ephys: + name: general__extracellular_ephys description: Metadata related to extracellular electrophysiology. attributes: name: @@ -532,11 +532,11 @@ classes: electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. - range: NWBFile__general__extracellular_ephys__electrodes + range: extracellular_ephys__electrodes required: false multivalued: false - NWBFile__general__extracellular_ephys__electrodes: - name: NWBFile__general__extracellular_ephys__electrodes + extracellular_ephys__electrodes: + name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. channels) used for recording. is_a: DynamicTable attributes: @@ -653,8 +653,8 @@ classes: range: text required: false multivalued: false - NWBFile__general__intracellular_ephys: - name: NWBFile__general__intracellular_ephys + general__intracellular_ephys: + name: general__intracellular_ephys description: Metadata related to intracellular electrophysiology. attributes: name: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml index bec9dfd..4d2c3f8 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml @@ -359,7 +359,7 @@ classes: name: source_script description: Script file or link to public source code used to create this NWB file. 
- range: NWBFile__general__source_script + range: general__source_script required: false multivalued: false stimulus: @@ -408,13 +408,13 @@ classes: extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. - range: NWBFile__general__extracellular_ephys + range: general__extracellular_ephys required: false multivalued: false intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. - range: NWBFile__general__intracellular_ephys + range: general__intracellular_ephys required: false multivalued: false optogenetics: @@ -433,8 +433,8 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane - NWBFile__general__source_script: - name: NWBFile__general__source_script + general__source_script: + name: general__source_script description: Script file or link to public source code used to create this NWB file. attributes: @@ -513,8 +513,8 @@ classes: range: text required: false multivalued: false - NWBFile__general__extracellular_ephys: - name: NWBFile__general__extracellular_ephys + general__extracellular_ephys: + name: general__extracellular_ephys description: Metadata related to extracellular electrophysiology. attributes: name: @@ -532,11 +532,11 @@ classes: electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. - range: NWBFile__general__extracellular_ephys__electrodes + range: extracellular_ephys__electrodes required: false multivalued: false - NWBFile__general__extracellular_ephys__electrodes: - name: NWBFile__general__extracellular_ephys__electrodes + extracellular_ephys__electrodes: + name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. channels) used for recording. is_a: DynamicTable attributes: @@ -653,8 +653,8 @@ classes: range: text required: false multivalued: false - NWBFile__general__intracellular_ephys: - name: NWBFile__general__intracellular_ephys + general__intracellular_ephys: + name: general__intracellular_ephys description: Metadata related to intracellular electrophysiology. attributes: name: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml index 0c91895..885874a 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml @@ -373,7 +373,7 @@ classes: name: source_script description: Script file or link to public source code used to create this NWB file. - range: NWBFile__general__source_script + range: general__source_script required: false multivalued: false stimulus: @@ -422,13 +422,13 @@ classes: extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. - range: NWBFile__general__extracellular_ephys + range: general__extracellular_ephys required: false multivalued: false intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. - range: NWBFile__general__intracellular_ephys + range: general__intracellular_ephys required: false multivalued: false optogenetics: @@ -447,8 +447,8 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane - NWBFile__general__source_script: - name: NWBFile__general__source_script + general__source_script: + name: general__source_script description: Script file or link to public source code used to create this NWB file. 
attributes: @@ -466,8 +466,8 @@ classes: name: value range: text required: true - NWBFile__general__extracellular_ephys: - name: NWBFile__general__extracellular_ephys + general__extracellular_ephys: + name: general__extracellular_ephys description: Metadata related to extracellular electrophysiology. attributes: name: @@ -485,11 +485,11 @@ classes: electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. - range: NWBFile__general__extracellular_ephys__electrodes + range: extracellular_ephys__electrodes required: false multivalued: false - NWBFile__general__extracellular_ephys__electrodes: - name: NWBFile__general__extracellular_ephys__electrodes + extracellular_ephys__electrodes: + name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. channels) used for recording. is_a: DynamicTable attributes: @@ -606,8 +606,8 @@ classes: range: text required: false multivalued: false - NWBFile__general__intracellular_ephys: - name: NWBFile__general__intracellular_ephys + general__intracellular_ephys: + name: general__intracellular_ephys description: Metadata related to intracellular electrophysiology. attributes: name: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml index 5974be3..a0aa5f7 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml @@ -373,7 +373,7 @@ classes: name: source_script description: Script file or link to public source code used to create this NWB file. - range: NWBFile__general__source_script + range: general__source_script required: false multivalued: false stimulus: @@ -422,13 +422,13 @@ classes: extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. - range: NWBFile__general__extracellular_ephys + range: general__extracellular_ephys required: false multivalued: false intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. - range: NWBFile__general__intracellular_ephys + range: general__intracellular_ephys required: false multivalued: false optogenetics: @@ -447,8 +447,8 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane - NWBFile__general__source_script: - name: NWBFile__general__source_script + general__source_script: + name: general__source_script description: Script file or link to public source code used to create this NWB file. attributes: @@ -466,8 +466,8 @@ classes: name: value range: text required: true - NWBFile__general__extracellular_ephys: - name: NWBFile__general__extracellular_ephys + general__extracellular_ephys: + name: general__extracellular_ephys description: Metadata related to extracellular electrophysiology. attributes: name: @@ -485,11 +485,11 @@ classes: electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. - range: NWBFile__general__extracellular_ephys__electrodes + range: extracellular_ephys__electrodes required: false multivalued: false - NWBFile__general__extracellular_ephys__electrodes: - name: NWBFile__general__extracellular_ephys__electrodes + extracellular_ephys__electrodes: + name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. channels) used for recording. 
is_a: DynamicTable attributes: @@ -606,8 +606,8 @@ classes: range: text required: false multivalued: false - NWBFile__general__intracellular_ephys: - name: NWBFile__general__intracellular_ephys + general__intracellular_ephys: + name: general__intracellular_ephys description: Metadata related to intracellular electrophysiology. attributes: name: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml index 9b3da02..5b00582 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml @@ -373,7 +373,7 @@ classes: name: source_script description: Script file or link to public source code used to create this NWB file. - range: NWBFile__general__source_script + range: general__source_script required: false multivalued: false stimulus: @@ -422,13 +422,13 @@ classes: extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. - range: NWBFile__general__extracellular_ephys + range: general__extracellular_ephys required: false multivalued: false intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. - range: NWBFile__general__intracellular_ephys + range: general__intracellular_ephys required: false multivalued: false optogenetics: @@ -447,8 +447,8 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane - NWBFile__general__source_script: - name: NWBFile__general__source_script + general__source_script: + name: general__source_script description: Script file or link to public source code used to create this NWB file. attributes: @@ -466,8 +466,8 @@ classes: name: value range: text required: true - NWBFile__general__extracellular_ephys: - name: NWBFile__general__extracellular_ephys + general__extracellular_ephys: + name: general__extracellular_ephys description: Metadata related to extracellular electrophysiology. attributes: name: @@ -485,11 +485,11 @@ classes: electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. - range: NWBFile__general__extracellular_ephys__electrodes + range: extracellular_ephys__electrodes required: false multivalued: false - NWBFile__general__extracellular_ephys__electrodes: - name: NWBFile__general__extracellular_ephys__electrodes + extracellular_ephys__electrodes: + name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. channels) used for recording. is_a: DynamicTable attributes: @@ -607,8 +607,8 @@ classes: range: text required: false multivalued: false - NWBFile__general__intracellular_ephys: - name: NWBFile__general__intracellular_ephys + general__intracellular_ephys: + name: general__intracellular_ephys description: Metadata related to intracellular electrophysiology. attributes: name: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml index ed8fdda..7aade20 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml @@ -373,7 +373,7 @@ classes: name: source_script description: Script file or link to public source code used to create this NWB file. 
- range: NWBFile__general__source_script + range: general__source_script required: false multivalued: false stimulus: @@ -422,13 +422,13 @@ classes: extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. - range: NWBFile__general__extracellular_ephys + range: general__extracellular_ephys required: false multivalued: false intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. - range: NWBFile__general__intracellular_ephys + range: general__intracellular_ephys required: false multivalued: false optogenetics: @@ -447,8 +447,8 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane - NWBFile__general__source_script: - name: NWBFile__general__source_script + general__source_script: + name: general__source_script description: Script file or link to public source code used to create this NWB file. attributes: @@ -466,8 +466,8 @@ classes: name: value range: text required: true - NWBFile__general__extracellular_ephys: - name: NWBFile__general__extracellular_ephys + general__extracellular_ephys: + name: general__extracellular_ephys description: Metadata related to extracellular electrophysiology. attributes: name: @@ -485,11 +485,11 @@ classes: electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. - range: NWBFile__general__extracellular_ephys__electrodes + range: extracellular_ephys__electrodes required: false multivalued: false - NWBFile__general__extracellular_ephys__electrodes: - name: NWBFile__general__extracellular_ephys__electrodes + extracellular_ephys__electrodes: + name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. channels) used for recording. is_a: DynamicTable attributes: @@ -607,8 +607,8 @@ classes: range: text required: false multivalued: false - NWBFile__general__intracellular_ephys: - name: NWBFile__general__intracellular_ephys + general__intracellular_ephys: + name: general__intracellular_ephys description: Metadata related to intracellular electrophysiology. attributes: name: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml index ab13eef..494e7ae 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml @@ -374,7 +374,7 @@ classes: name: source_script description: Script file or link to public source code used to create this NWB file. - range: NWBFile__general__source_script + range: general__source_script required: false multivalued: false stimulus: @@ -423,13 +423,13 @@ classes: extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. - range: NWBFile__general__extracellular_ephys + range: general__extracellular_ephys required: false multivalued: false intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. - range: NWBFile__general__intracellular_ephys + range: general__intracellular_ephys required: false multivalued: false optogenetics: @@ -448,8 +448,8 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane - NWBFile__general__source_script: - name: NWBFile__general__source_script + general__source_script: + name: general__source_script description: Script file or link to public source code used to create this NWB file. 
attributes: @@ -467,8 +467,8 @@ classes: name: value range: text required: true - NWBFile__general__extracellular_ephys: - name: NWBFile__general__extracellular_ephys + general__extracellular_ephys: + name: general__extracellular_ephys description: Metadata related to extracellular electrophysiology. attributes: name: @@ -486,11 +486,11 @@ classes: electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. - range: NWBFile__general__extracellular_ephys__electrodes + range: extracellular_ephys__electrodes required: false multivalued: false - NWBFile__general__extracellular_ephys__electrodes: - name: NWBFile__general__extracellular_ephys__electrodes + extracellular_ephys__electrodes: + name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. channels) used for recording. is_a: DynamicTable attributes: @@ -610,8 +610,8 @@ classes: range: text required: false multivalued: false - NWBFile__general__intracellular_ephys: - name: NWBFile__general__intracellular_ephys + general__intracellular_ephys: + name: general__intracellular_ephys description: Metadata related to intracellular electrophysiology. attributes: name: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml index 150219c..6e7a701 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml @@ -374,7 +374,7 @@ classes: name: source_script description: Script file or link to public source code used to create this NWB file. - range: NWBFile__general__source_script + range: general__source_script required: false multivalued: false stimulus: @@ -423,13 +423,13 @@ classes: extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. - range: NWBFile__general__extracellular_ephys + range: general__extracellular_ephys required: false multivalued: false intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. - range: NWBFile__general__intracellular_ephys + range: general__intracellular_ephys required: false multivalued: false optogenetics: @@ -448,8 +448,8 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane - NWBFile__general__source_script: - name: NWBFile__general__source_script + general__source_script: + name: general__source_script description: Script file or link to public source code used to create this NWB file. attributes: @@ -467,8 +467,8 @@ classes: name: value range: text required: true - NWBFile__general__extracellular_ephys: - name: NWBFile__general__extracellular_ephys + general__extracellular_ephys: + name: general__extracellular_ephys description: Metadata related to extracellular electrophysiology. attributes: name: @@ -486,11 +486,11 @@ classes: electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. - range: NWBFile__general__extracellular_ephys__electrodes + range: extracellular_ephys__electrodes required: false multivalued: false - NWBFile__general__extracellular_ephys__electrodes: - name: NWBFile__general__extracellular_ephys__electrodes + extracellular_ephys__electrodes: + name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. channels) used for recording. 
is_a: DynamicTable attributes: @@ -610,8 +610,8 @@ classes: range: text required: false multivalued: false - NWBFile__general__intracellular_ephys: - name: NWBFile__general__intracellular_ephys + general__intracellular_ephys: + name: general__intracellular_ephys description: Metadata related to intracellular electrophysiology. attributes: name: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml index b846a65..f0c47ab 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml @@ -376,7 +376,7 @@ classes: name: source_script description: Script file or link to public source code used to create this NWB file. - range: NWBFile__general__source_script + range: general__source_script required: false multivalued: false stimulus: @@ -425,13 +425,13 @@ classes: extracellular_ephys: name: extracellular_ephys description: Metadata related to extracellular electrophysiology. - range: NWBFile__general__extracellular_ephys + range: general__extracellular_ephys required: false multivalued: false intracellular_ephys: name: intracellular_ephys description: Metadata related to intracellular electrophysiology. - range: NWBFile__general__intracellular_ephys + range: general__intracellular_ephys required: false multivalued: false optogenetics: @@ -450,8 +450,8 @@ classes: inlined_as_list: false any_of: - range: ImagingPlane - NWBFile__general__source_script: - name: NWBFile__general__source_script + general__source_script: + name: general__source_script description: Script file or link to public source code used to create this NWB file. attributes: @@ -469,8 +469,8 @@ classes: name: value range: text required: true - NWBFile__general__extracellular_ephys: - name: NWBFile__general__extracellular_ephys + general__extracellular_ephys: + name: general__extracellular_ephys description: Metadata related to extracellular electrophysiology. attributes: name: @@ -488,11 +488,11 @@ classes: electrodes: name: electrodes description: A table of all electrodes (i.e. channels) used for recording. - range: NWBFile__general__extracellular_ephys__electrodes + range: extracellular_ephys__electrodes required: false multivalued: false - NWBFile__general__extracellular_ephys__electrodes: - name: NWBFile__general__extracellular_ephys__electrodes + extracellular_ephys__electrodes: + name: extracellular_ephys__electrodes description: A table of all electrodes (i.e. channels) used for recording. is_a: DynamicTable attributes: @@ -612,8 +612,8 @@ classes: range: text required: false multivalued: false - NWBFile__general__intracellular_ephys: - name: NWBFile__general__intracellular_ephys + general__intracellular_ephys: + name: general__intracellular_ephys description: Metadata related to intracellular electrophysiology. attributes: name: From b3b5b9d793d7f5fff21bc8f2bc7dc6cb991da06e Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 5 Aug 2024 16:05:44 -0700 Subject: [PATCH 12/61] unify naming of derived child groups, arrays, and values as just all being named "value". Add link building. Fix erroneously collapsing anonymous and named container groups. 
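Links are built as slots that accept either the linked object itself or a string path to it in the file hierarchy, tagged with a `source_type` annotation so the original link semantics are preserved. As a rough sketch (illustrative only, not verbatim generator output), a hypothetical required link named `imaging_plane` targeting `ImagingPlane` would yield LinkML like:

```yaml
imaging_plane:
  name: imaging_plane
  annotations:
    source_type:
      tag: source_type
      value: link
  required: true
  any_of:
  - range: ImagingPlane
  - range: string
```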
--- docs/intro/translation.md | 25 ++- nwb_linkml/src/nwb_linkml/adapters/dataset.py | 31 ++-- nwb_linkml/src/nwb_linkml/adapters/group.py | 56 ++++--- nwb_linkml/tests/test_includes/test_hdmf.py | 6 +- scripts/generate_core.py | 143 +++++++++--------- 5 files changed, 156 insertions(+), 105 deletions(-) diff --git a/docs/intro/translation.md b/docs/intro/translation.md index 6170fee..a7dec83 100644 --- a/docs/intro/translation.md +++ b/docs/intro/translation.md @@ -289,8 +289,31 @@ When generating pydantic models we... There are several different ways to create references between objects in nwb/hdmf: -- ... +- [`links`](https://schema-language.readthedocs.io/en/latest/description.html#sec-link-spec) are group-level + properties that can reference other groups or datasets like this: + ```yaml + links: + - name: Link name + doc: Required string with the description of the link + target_type: Type of target + quantity: Optional quantity identifier for the group (default=1). + ``` +- [Reference `dtype`](https://schema-language.readthedocs.io/en/latest/description.html#reference-dtype)s are + dataset- and attribute-level properties that can reference both other objects and regions within other objects: + ```yaml + dtype: + target_type: ElectrodeGroup + reftype: object + ``` +- Implicitly, hdmf creates references between objects according to some naming conventions, e.g. + an attribute/dataset that is a `VectorIndex` named `mydata_index` will be linked to a `VectorData` + object `mydata`. +- There is currently a note in the schema language docs that there will be an additional + [Relationships](https://schema-language.readthedocs.io/en/latest/description.html#relationships) system + that explicitly models relationships, but it is unclear how that would differ from references. +We represent all of these by directly referring to the object type, preserving the source type in an annotation when necessary. ## LinkML to Everything diff --git a/nwb_linkml/src/nwb_linkml/adapters/dataset.py b/nwb_linkml/src/nwb_linkml/adapters/dataset.py index 8bc34b6..e9268cb 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/dataset.py +++ b/nwb_linkml/src/nwb_linkml/adapters/dataset.py @@ -216,8 +216,8 @@ class MapListlike(DatasetMap): Used exactly once in the core schema, in ``ImageReferences`` - an array of references to other ``Image`` datasets. We ignore the - usual array structure and unnest the implicit array into a slot names from the - target type rather than the oddly-named ``num_images`` dimension so that + usual array structure and unnest the implicit array into a slot named "value" + rather than the oddly-named ``num_images`` dimension so that ultimately in the pydantic model we get a nicely behaved single-level list. Examples: @@ -245,7 +245,7 @@ class MapListlike(DatasetMap): name: name range: string required: true - image: + value: name: image description: Ordered dataset of references to Image objects.
multivalued: true @@ -286,15 +286,15 @@ class MapListlike(DatasetMap): """ Map to a list of the given class """ - dtype = camel_to_snake(ClassAdapter.handle_dtype(cls.dtype)) slot = SlotDefinition( - name=dtype, + name="value", multivalued=True, range=ClassAdapter.handle_dtype(cls.dtype), description=cls.doc, required=cls.quantity not in ("*", "?"), + annotations=[{"source_type": "reference"}], ) - res.classes[0].attributes[dtype] = slot + res.classes[0].attributes["value"] = slot return res @@ -533,9 +533,9 @@ class MapArrayLikeAttributes(DatasetMap): expressions = array_adapter.make_slot() # make a slot for the arraylike class array_slot = SlotDefinition( - name="array", range=ClassAdapter.handle_dtype(cls.dtype), **expressions + name="value", range=ClassAdapter.handle_dtype(cls.dtype), **expressions ) - res.classes[0].attributes.update({"array": array_slot}) + res.classes[0].attributes.update({"value": array_slot}) return res @@ -572,7 +572,7 @@ class MapClassRange(DatasetMap): name=cls.name, description=cls.doc, range=f"{cls.neurodata_type_inc}", - annotations=[{"named": True}], + annotations=[{"named": True}, {"source_type": "neurodata_type_inc"}], **QUANTITY_MAP[cls.quantity], ) res = BuildResult(slots=[this_slot]) @@ -686,17 +686,28 @@ class MapNVectors(DatasetMap): Most commonly: ``VectorData`` is subclassed without a name and with a '*' quantity to indicate arbitrary columns. + + Used twice: + - Images + - DynamicTable (and all its uses) + + DynamicTable (and the slot VectorData where this is called for) + is handled specially and just dropped, because we handle the possibility for + arbitrary extra VectorData in the :mod:`nwb_linkml.includes.hdmf` module mixin classes. + + So really this is just a handler for the `Images` case """ @classmethod def check(c, cls: Dataset) -> bool: """ - Check for being an unnamed multivalued vector class + Check for being an unnamed multivalued vector class that isn't VectorData """ return ( cls.name is None and cls.neurodata_type_def is None and cls.neurodata_type_inc + and cls.neurodata_type_inc != "VectorData" and cls.quantity in ("*", "+") ) diff --git a/nwb_linkml/src/nwb_linkml/adapters/group.py b/nwb_linkml/src/nwb_linkml/adapters/group.py index 3b75487..451cb4c 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/group.py +++ b/nwb_linkml/src/nwb_linkml/adapters/group.py @@ -2,7 +2,7 @@ Adapter for NWB groups to linkml Classes """ -from typing import Type +from typing import Type, List from linkml_runtime.linkml_model import SlotDefinition @@ -28,25 +28,13 @@ class GroupAdapter(ClassAdapter): Do the translation, yielding the BuildResult """ # Handle container groups with only * quantity unnamed groups - if len(self.cls.groups) > 0 and all( - [self._check_if_container(g) for g in self.cls.groups] + if ( + len(self.cls.groups) > 0 + and not self.cls.links + and all([self._check_if_container(g) for g in self.cls.groups]) ): # and \ # self.parent is not None: return self.handle_container_group(self.cls) - # Or you can have groups like /intervals where there are some named groups, and some unnamed - # but they all have the same type - elif ( - len(self.cls.groups) > 0 - and all( - [ - g.neurodata_type_inc == self.cls.groups[0].neurodata_type_inc - for g in self.cls.groups - ] - ) - and self.cls.groups[0].neurodata_type_inc is not None - and all([g.quantity in ("?", "*") for g in self.cls.groups]) - ): - return self.handle_container_group(self.cls) # handle if we are a terminal container group without making a new class if ( @@ -58,17 +46,42 @@ class 
GroupAdapter(ClassAdapter): return self.handle_container_slot(self.cls) nested_res = self.build_subclasses() + # add links + links = self.build_links() + # we don't propagate slots up to the next level since they are meant for this # level (ie. a way to refer to our children) - res = self.build_base(extra_attrs=nested_res.slots) + res = self.build_base(extra_attrs=nested_res.slots + links) # we do propagate classes tho res.classes.extend(nested_res.classes) return res + def build_links(self) -> List[SlotDefinition]: + """ + Build links specified in the ``links`` field as slots that refer to other + classes, with an additional annotation specifying that they are in fact links. + + Link slots can take either the object itself or the path to that object in the + file hierarchy as a string. + """ + if not self.cls.links: + return [] + + slots = [ + SlotDefinition( + name=link.name, + any_of=[{"range": link.target_type}, {"range": "string"}], + annotations=[{"tag": "source_type", "value": "link"}], + **QUANTITY_MAP[link.quantity], + ) + for link in self.cls.links + ] + return slots + def handle_container_group(self, cls: Group) -> BuildResult: """ - Make a special LinkML `children` slot that can + Make a special LinkML `value` slot that can have any number of the objects that are of `neurodata_type_inc` class Examples: @@ -84,14 +97,11 @@ class GroupAdapter(ClassAdapter): doc: Images objects containing images of presented stimuli. quantity: '*' - Args: - children (List[:class:`.Group`]): Child groups - """ # don't build subgroups as their own classes, just make a slot # that can contain them - name = cls.name if self.cls.name else "children" + name = cls.name if self.cls.name else "value" slot = SlotDefinition( name=name, diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index 26f5109..73f08ef 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -7,12 +7,12 @@ import pytest from nwb_linkml.models.pydantic.core.v2_7_0.namespace import ( ElectricalSeries, ElectrodeGroup, - NWBFileGeneralExtracellularEphysElectrodes, + ExtracellularEphysElectrodes, ) @pytest.fixture() -def electrical_series() -> Tuple["ElectricalSeries", "NWBFileGeneralExtracellularEphysElectrodes"]: +def electrical_series() -> Tuple["ElectricalSeries", "ExtracellularEphysElectrodes"]: """ Demo electrical series with adjoining electrodes """ @@ -27,7 +27,7 @@ def electrical_series() -> Tuple["ElectricalSeries", "NWBFileGeneralExtracellula ) # make electrodes tables - electrodes = NWBFileGeneralExtracellularEphysElectrodes( + electrodes = ExtracellularEphysElectrodes( id=np.arange(0, n_electrodes), x=np.arange(0, n_electrodes), y=np.arange(n_electrodes, n_electrodes * 2), diff --git a/scripts/generate_core.py b/scripts/generate_core.py index b447e00..7f86171 100644 --- a/scripts/generate_core.py +++ b/scripts/generate_core.py @@ -17,44 +17,53 @@ from nwb_linkml.providers import LinkMLProvider, PydanticProvider from nwb_linkml.providers.git import NWB_CORE_REPO, HDMF_COMMON_REPO, GitRepo from nwb_linkml.io import schema as io -def generate_core_yaml(output_path:Path, dry_run:bool=False, hdmf_only:bool=False): + +def generate_core_yaml(output_path: Path, dry_run: bool = False, hdmf_only: bool = False): """Just build the latest version of the core schema""" core = io.load_nwb_core(hdmf_only=hdmf_only) built_schemas = core.build().schemas for schema in built_schemas: - output_file = output_path / (schema.name + '.yaml') + 
output_file = output_path / (schema.name + ".yaml") if not dry_run: yaml_dumper.dump(schema, output_file) -def generate_core_pydantic(yaml_path:Path, output_path:Path, dry_run:bool=False): + +def generate_core_pydantic(yaml_path: Path, output_path: Path, dry_run: bool = False): """Just generate the latest version of the core schema""" - for schema in yaml_path.glob('*.yaml'): - python_name = schema.stem.replace('.', '_').replace('-', '_') - pydantic_file = (output_path / python_name).with_suffix('.py') + for schema in yaml_path.glob("*.yaml"): + python_name = schema.stem.replace(".", "_").replace("-", "_") + pydantic_file = (output_path / python_name).with_suffix(".py") generator = NWBPydanticGenerator( str(schema), - pydantic_version='2', + pydantic_version="2", emit_metadata=True, gen_classvars=True, - gen_slots=True + gen_slots=True, ) gen_pydantic = generator.serialize() if not dry_run: - with open(pydantic_file, 'w') as pfile: + with open(pydantic_file, "w") as pfile: pfile.write(gen_pydantic) -def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False, repo:GitRepo=NWB_CORE_REPO, hdmf_only=False): + +def generate_versions( + yaml_path: Path, + pydantic_path: Path, + dry_run: bool = False, + repo: GitRepo = NWB_CORE_REPO, + hdmf_only=False, +): """ Generate linkml models for all versions """ - #repo.clone(force=True) + # repo.clone(force=True) repo.clone() # use a directory underneath this one as the temporary directory rather than # the default hidden one - tmp_dir = Path(__file__).parent / '__tmp__' + tmp_dir = Path(__file__).parent / "__tmp__" if tmp_dir.exists(): shutil.rmtree(tmp_dir) tmp_dir.mkdir() @@ -65,12 +74,14 @@ def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False, re failed_versions = {} overall_progress = Progress() - overall_task = overall_progress.add_task('All Versions', total=len(NWB_CORE_REPO.versions)) + overall_task = overall_progress.add_task("All Versions", total=len(NWB_CORE_REPO.versions)) build_progress = Progress( - TextColumn("[bold blue]{task.fields[name]} - [bold green]{task.fields[action]}", - table_column=Column(ratio=1)), - BarColumn(table_column=Column(ratio=1), bar_width=None) + TextColumn( + "[bold blue]{task.fields[name]} - [bold green]{task.fields[action]}", + table_column=Column(ratio=1), + ), + BarColumn(table_column=Column(ratio=1), bar_width=None), ) panel = Panel(Group(build_progress, overall_progress)) @@ -84,7 +95,9 @@ def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False, re # build linkml try: # check out the version (this should also refresh the hdmf-common schema) - linkml_task = build_progress.add_task('', name=version, action='Checkout Version', total=3) + linkml_task = build_progress.add_task( + "", name=version, action="Checkout Version", total=3 + ) repo.tag = version build_progress.update(linkml_task, advance=1, action="Load Namespaces") @@ -92,35 +105,36 @@ def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False, re core_ns = io.load_namespace_adapter(repo.namespace_file) if repo.namespace == NWB_CORE_REPO: # then the hdmf-common namespace - hdmf_common_ns = io.load_namespace_adapter(repo.temp_directory / 'hdmf-common-schema' / 'common' / 'namespace.yaml') + hdmf_common_ns = io.load_namespace_adapter( + repo.temp_directory / "hdmf-common-schema" / "common" / "namespace.yaml" + ) core_ns.imported.append(hdmf_common_ns) build_progress.update(linkml_task, advance=1, action="Build LinkML") - linkml_res = linkml_provider.build(core_ns) 
build_progress.update(linkml_task, advance=1, action="Built LinkML") # build pydantic ns_files = [res.namespace for res in linkml_res.values()] - pydantic_task = build_progress.add_task('', name=version, action='', total=len(ns_files)) + pydantic_task = build_progress.add_task( + "", name=version, action="", total=len(ns_files) + ) for schema in ns_files: - pbar_string = ' - '.join([schema.parts[-3], schema.parts[-2], schema.parts[-1]]) + pbar_string = schema.parts[-3] build_progress.update(pydantic_task, action=pbar_string) pydantic_provider.build(schema, versions=core_ns.versions, split=True) build_progress.update(pydantic_task, advance=1) - build_progress.update(pydantic_task, action='Built Pydantic') - - + build_progress.update(pydantic_task, action="Built Pydantic") except Exception as e: build_progress.stop_task(linkml_task) if linkml_task is not None: - build_progress.update(linkml_task, action='[bold red]LinkML Build Failed') + build_progress.update(linkml_task, action="[bold red]LinkML Build Failed") build_progress.stop_task(linkml_task) if pydantic_task is not None: - build_progress.update(pydantic_task, action='[bold red]LinkML Build Failed') + build_progress.update(pydantic_task, action="[bold red]LinkML Build Failed") build_progress.stop_task(pydantic_task) failed_versions[version] = traceback.format_exception(e) @@ -131,67 +145,66 @@ def generate_versions(yaml_path:Path, pydantic_path:Path, dry_run:bool=False, re if not dry_run: if hdmf_only: - shutil.rmtree(yaml_path / 'linkml' / 'hdmf_common') - shutil.rmtree(yaml_path / 'linkml' / 'hdmf_experimental') - shutil.rmtree(pydantic_path / 'pydantic' / 'hdmf_common') - shutil.rmtree(pydantic_path / 'pydantic' / 'hdmf_experimental') - shutil.move(tmp_dir / 'linkml' / 'hdmf_common', yaml_path / 'linkml') - shutil.move(tmp_dir / 'linkml' / 'hdmf_experimental', yaml_path / 'linkml') - shutil.move(tmp_dir / 'pydantic' / 'hdmf_common', pydantic_path / 'pydantic') - shutil.move(tmp_dir / 'pydantic' / 'hdmf_experimental', pydantic_path / 'pydantic') + shutil.rmtree(yaml_path / "linkml" / "hdmf_common") + shutil.rmtree(yaml_path / "linkml" / "hdmf_experimental") + shutil.rmtree(pydantic_path / "pydantic" / "hdmf_common") + shutil.rmtree(pydantic_path / "pydantic" / "hdmf_experimental") + shutil.move(tmp_dir / "linkml" / "hdmf_common", yaml_path / "linkml") + shutil.move(tmp_dir / "linkml" / "hdmf_experimental", yaml_path / "linkml") + shutil.move(tmp_dir / "pydantic" / "hdmf_common", pydantic_path / "pydantic") + shutil.move(tmp_dir / "pydantic" / "hdmf_experimental", pydantic_path / "pydantic") else: - shutil.rmtree(yaml_path / 'linkml') - shutil.rmtree(pydantic_path / 'pydantic') - shutil.move(tmp_dir / 'linkml', yaml_path) - shutil.move(tmp_dir / 'pydantic', pydantic_path) + shutil.rmtree(yaml_path / "linkml") + shutil.rmtree(pydantic_path / "pydantic") + shutil.move(tmp_dir / "linkml", yaml_path) + shutil.move(tmp_dir / "pydantic", pydantic_path) # import the most recent version of the schemaz we built - latest_version = sorted((pydantic_path / 'pydantic' / 'core').iterdir(), key=os.path.getmtime)[-1] + latest_version = sorted( + (pydantic_path / "pydantic" / "core").iterdir(), key=os.path.getmtime + )[-1] # make inits to use the schema! we don't usually do this in the # provider class because we directly import the files there. 
- with open(pydantic_path / 'pydantic' / '__init__.py', 'w') as initfile: - initfile.write(' ') + with open(pydantic_path / "pydantic" / "__init__.py", "w") as initfile: + initfile.write(" ") - with open(pydantic_path / '__init__.py', 'w') as initfile: - initfile.write(f'from .pydantic.core.{latest_version.name}.namespace import *') + with open(pydantic_path / "__init__.py", "w") as initfile: + initfile.write(f"from .pydantic.core.{latest_version.name}.namespace import *") finally: if len(failed_versions) > 0: - print('Failed Building Versions:') + print("Failed Building Versions:") print(failed_versions) - - def parser() -> ArgumentParser: - parser = ArgumentParser('Generate all available versions of NWB core schema') + parser = ArgumentParser("Generate all available versions of NWB core schema") parser.add_argument( - '--yaml', + "--yaml", help="directory to export linkML schema to", type=Path, - default=Path(__file__).parent.parent / 'nwb_linkml' / 'src' / 'nwb_linkml' / 'schema' + default=Path(__file__).parent.parent / "nwb_linkml" / "src" / "nwb_linkml" / "schema", ) parser.add_argument( - '--pydantic', + "--pydantic", help="directory to export pydantic models", type=Path, - default=Path(__file__).parent.parent / 'nwb_linkml' / 'src' / 'nwb_linkml' / 'models' + default=Path(__file__).parent.parent / "nwb_linkml" / "src" / "nwb_linkml" / "models", ) + parser.add_argument("--hdmf", help="Only generate the HDMF namespaces", action="store_true") parser.add_argument( - '--hdmf', - help="Only generate the HDMF namespaces", - action="store_true" - ) - parser.add_argument( - '--latest', + "--latest", help="Only generate the latest version of the core schemas.", - action="store_true" + action="store_true", ) parser.add_argument( - '--dry-run', - help="Generate schema and pydantic models without moving them into the target directories, for testing purposes", - action='store_true' + "--dry-run", + help=( + "Generate schema and pydantic models without moving them into the target directories," + " for testing purposes" + ), + action="store_true", ) return parser @@ -212,12 +225,6 @@ def main(): else: generate_versions(args.yaml, args.pydantic, args.dry_run, repo, args.hdmf) + if __name__ == "__main__": main() - - - - - - - From 0a150d6bc282a10f96b69a6526dff49f4043b37d Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 5 Aug 2024 16:06:05 -0700 Subject: [PATCH 13/61] update models --- .../pydantic/core/v2_2_0/core_nwb_base.py | 6 +- .../pydantic/core/v2_2_0/core_nwb_behavior.py | 16 +- .../pydantic/core/v2_2_0/core_nwb_ecephys.py | 55 +++- .../pydantic/core/v2_2_0/core_nwb_epoch.py | 14 +- .../pydantic/core/v2_2_0/core_nwb_file.py | 44 ++- .../pydantic/core/v2_2_0/core_nwb_icephys.py | 73 ++++- .../pydantic/core/v2_2_0/core_nwb_image.py | 26 +- .../pydantic/core/v2_2_0/core_nwb_misc.py | 41 ++- .../pydantic/core/v2_2_0/core_nwb_ogen.py | 19 ++ .../pydantic/core/v2_2_0/core_nwb_ophys.py | 40 ++- .../core/v2_2_0/core_nwb_retinotopy.py | 50 +++- .../models/pydantic/core/v2_2_0/namespace.py | 1 + .../pydantic/core/v2_2_1/core_nwb_base.py | 6 +- .../pydantic/core/v2_2_1/core_nwb_behavior.py | 16 +- .../pydantic/core/v2_2_1/core_nwb_ecephys.py | 55 +++- .../pydantic/core/v2_2_1/core_nwb_epoch.py | 14 +- .../pydantic/core/v2_2_1/core_nwb_file.py | 44 ++- .../pydantic/core/v2_2_1/core_nwb_icephys.py | 73 ++++- .../pydantic/core/v2_2_1/core_nwb_image.py | 26 +- .../pydantic/core/v2_2_1/core_nwb_misc.py | 41 ++- .../pydantic/core/v2_2_1/core_nwb_ogen.py | 19 ++ 
.../pydantic/core/v2_2_1/core_nwb_ophys.py | 40 ++- .../core/v2_2_1/core_nwb_retinotopy.py | 50 +++- .../models/pydantic/core/v2_2_1/namespace.py | 1 + .../pydantic/core/v2_2_2/core_nwb_base.py | 6 +- .../pydantic/core/v2_2_2/core_nwb_behavior.py | 16 +- .../pydantic/core/v2_2_2/core_nwb_ecephys.py | 55 +++- .../pydantic/core/v2_2_2/core_nwb_epoch.py | 16 +- .../pydantic/core/v2_2_2/core_nwb_file.py | 44 ++- .../pydantic/core/v2_2_2/core_nwb_icephys.py | 73 ++++- .../pydantic/core/v2_2_2/core_nwb_image.py | 26 +- .../pydantic/core/v2_2_2/core_nwb_misc.py | 43 ++- .../pydantic/core/v2_2_2/core_nwb_ogen.py | 19 ++ .../pydantic/core/v2_2_2/core_nwb_ophys.py | 173 ++++++++++- .../core/v2_2_2/core_nwb_retinotopy.py | 14 +- .../models/pydantic/core/v2_2_2/namespace.py | 5 + .../pydantic/core/v2_2_4/core_nwb_base.py | 6 +- .../pydantic/core/v2_2_4/core_nwb_behavior.py | 16 +- .../pydantic/core/v2_2_4/core_nwb_ecephys.py | 55 +++- .../pydantic/core/v2_2_4/core_nwb_epoch.py | 16 +- .../pydantic/core/v2_2_4/core_nwb_file.py | 44 ++- .../pydantic/core/v2_2_4/core_nwb_icephys.py | 73 ++++- .../pydantic/core/v2_2_4/core_nwb_image.py | 26 +- .../pydantic/core/v2_2_4/core_nwb_misc.py | 43 ++- .../pydantic/core/v2_2_4/core_nwb_ogen.py | 19 ++ .../pydantic/core/v2_2_4/core_nwb_ophys.py | 201 ++++++++++++- .../core/v2_2_4/core_nwb_retinotopy.py | 14 +- .../models/pydantic/core/v2_2_4/namespace.py | 4 + .../pydantic/core/v2_2_5/core_nwb_base.py | 6 +- .../pydantic/core/v2_2_5/core_nwb_behavior.py | 16 +- .../pydantic/core/v2_2_5/core_nwb_ecephys.py | 55 +++- .../pydantic/core/v2_2_5/core_nwb_epoch.py | 16 +- .../pydantic/core/v2_2_5/core_nwb_file.py | 44 ++- .../pydantic/core/v2_2_5/core_nwb_icephys.py | 73 ++++- .../pydantic/core/v2_2_5/core_nwb_image.py | 26 +- .../pydantic/core/v2_2_5/core_nwb_misc.py | 43 ++- .../pydantic/core/v2_2_5/core_nwb_ogen.py | 19 ++ .../pydantic/core/v2_2_5/core_nwb_ophys.py | 181 +++++++++++- .../core/v2_2_5/core_nwb_retinotopy.py | 14 +- .../models/pydantic/core/v2_2_5/namespace.py | 4 + .../pydantic/core/v2_3_0/core_nwb_base.py | 6 +- .../pydantic/core/v2_3_0/core_nwb_behavior.py | 16 +- .../pydantic/core/v2_3_0/core_nwb_ecephys.py | 55 +++- .../pydantic/core/v2_3_0/core_nwb_epoch.py | 16 +- .../pydantic/core/v2_3_0/core_nwb_file.py | 44 ++- .../pydantic/core/v2_3_0/core_nwb_icephys.py | 73 ++++- .../pydantic/core/v2_3_0/core_nwb_image.py | 54 +++- .../pydantic/core/v2_3_0/core_nwb_misc.py | 64 +++- .../pydantic/core/v2_3_0/core_nwb_ogen.py | 19 ++ .../pydantic/core/v2_3_0/core_nwb_ophys.py | 190 +++++++++++- .../core/v2_3_0/core_nwb_retinotopy.py | 14 +- .../models/pydantic/core/v2_3_0/namespace.py | 4 + .../pydantic/core/v2_4_0/core_nwb_base.py | 8 +- .../pydantic/core/v2_4_0/core_nwb_behavior.py | 16 +- .../pydantic/core/v2_4_0/core_nwb_ecephys.py | 55 +++- .../pydantic/core/v2_4_0/core_nwb_epoch.py | 16 +- .../pydantic/core/v2_4_0/core_nwb_file.py | 44 ++- .../pydantic/core/v2_4_0/core_nwb_icephys.py | 125 +++++++- .../pydantic/core/v2_4_0/core_nwb_image.py | 54 +++- .../pydantic/core/v2_4_0/core_nwb_misc.py | 64 +++- .../pydantic/core/v2_4_0/core_nwb_ogen.py | 19 ++ .../pydantic/core/v2_4_0/core_nwb_ophys.py | 190 +++++++++++- .../core/v2_4_0/core_nwb_retinotopy.py | 14 +- .../models/pydantic/core/v2_4_0/namespace.py | 4 + .../pydantic/core/v2_5_0/core_nwb_base.py | 25 +- .../pydantic/core/v2_5_0/core_nwb_behavior.py | 16 +- .../pydantic/core/v2_5_0/core_nwb_ecephys.py | 55 +++- .../pydantic/core/v2_5_0/core_nwb_epoch.py | 21 +- .../pydantic/core/v2_5_0/core_nwb_file.py 
| 44 ++- .../pydantic/core/v2_5_0/core_nwb_icephys.py | 125 +++++++- .../pydantic/core/v2_5_0/core_nwb_image.py | 71 ++++- .../pydantic/core/v2_5_0/core_nwb_misc.py | 64 +++- .../pydantic/core/v2_5_0/core_nwb_ogen.py | 19 ++ .../pydantic/core/v2_5_0/core_nwb_ophys.py | 190 +++++++++++- .../core/v2_5_0/core_nwb_retinotopy.py | 14 +- .../models/pydantic/core/v2_5_0/namespace.py | 4 + .../core/v2_6_0_alpha/core_nwb_base.py | 25 +- .../core/v2_6_0_alpha/core_nwb_behavior.py | 16 +- .../core/v2_6_0_alpha/core_nwb_ecephys.py | 55 +++- .../core/v2_6_0_alpha/core_nwb_epoch.py | 21 +- .../core/v2_6_0_alpha/core_nwb_file.py | 44 ++- .../core/v2_6_0_alpha/core_nwb_icephys.py | 125 +++++++- .../core/v2_6_0_alpha/core_nwb_image.py | 64 +++- .../core/v2_6_0_alpha/core_nwb_misc.py | 64 +++- .../core/v2_6_0_alpha/core_nwb_ogen.py | 19 ++ .../core/v2_6_0_alpha/core_nwb_ophys.py | 208 ++++++++++++- .../core/v2_6_0_alpha/core_nwb_retinotopy.py | 14 +- .../pydantic/core/v2_6_0_alpha/namespace.py | 4 + .../pydantic/core/v2_7_0/core_nwb_base.py | 25 +- .../pydantic/core/v2_7_0/core_nwb_behavior.py | 16 +- .../pydantic/core/v2_7_0/core_nwb_ecephys.py | 55 +++- .../pydantic/core/v2_7_0/core_nwb_epoch.py | 21 +- .../pydantic/core/v2_7_0/core_nwb_file.py | 44 ++- .../pydantic/core/v2_7_0/core_nwb_icephys.py | 132 ++++++++- .../pydantic/core/v2_7_0/core_nwb_image.py | 71 ++++- .../pydantic/core/v2_7_0/core_nwb_misc.py | 64 +++- .../pydantic/core/v2_7_0/core_nwb_ogen.py | 19 ++ .../pydantic/core/v2_7_0/core_nwb_ophys.py | 208 ++++++++++++- .../core/v2_7_0/core_nwb_retinotopy.py | 14 +- .../models/pydantic/core/v2_7_0/namespace.py | 4 + .../hdmf_common/v1_1_3/hdmf_common_table.py | 6 +- .../hdmf_common/v1_5_0/hdmf_common_base.py | 2 +- .../hdmf_common/v1_5_0/hdmf_common_table.py | 8 +- .../hdmf_common/v1_8_0/hdmf_common_base.py | 2 +- .../hdmf_common/v1_8_0/hdmf_common_table.py | 8 +- .../v0_1_0/hdmf_experimental_experimental.py | 2 +- .../v0_5_0/hdmf_experimental_experimental.py | 2 +- .../linkml/core/v2_2_0/core.nwb.base.yaml | 12 +- .../linkml/core/v2_2_0/core.nwb.behavior.yaml | 32 +- .../linkml/core/v2_2_0/core.nwb.ecephys.yaml | 51 +++- .../linkml/core/v2_2_0/core.nwb.epoch.yaml | 6 + .../linkml/core/v2_2_0/core.nwb.file.yaml | 49 +++- .../linkml/core/v2_2_0/core.nwb.icephys.yaml | 29 +- .../linkml/core/v2_2_0/core.nwb.image.yaml | 26 +- .../linkml/core/v2_2_0/core.nwb.misc.yaml | 31 +- .../linkml/core/v2_2_0/core.nwb.ogen.yaml | 22 ++ .../linkml/core/v2_2_0/core.nwb.ophys.yaml | 53 +++- .../core/v2_2_0/core.nwb.retinotopy.yaml | 26 +- .../linkml/core/v2_2_1/core.nwb.base.yaml | 12 +- .../linkml/core/v2_2_1/core.nwb.behavior.yaml | 32 +- .../linkml/core/v2_2_1/core.nwb.ecephys.yaml | 51 +++- .../linkml/core/v2_2_1/core.nwb.epoch.yaml | 6 + .../linkml/core/v2_2_1/core.nwb.file.yaml | 49 +++- .../linkml/core/v2_2_1/core.nwb.icephys.yaml | 29 +- .../linkml/core/v2_2_1/core.nwb.image.yaml | 26 +- .../linkml/core/v2_2_1/core.nwb.misc.yaml | 31 +- .../linkml/core/v2_2_1/core.nwb.ogen.yaml | 22 ++ .../linkml/core/v2_2_1/core.nwb.ophys.yaml | 53 +++- .../core/v2_2_1/core.nwb.retinotopy.yaml | 26 +- .../linkml/core/v2_2_2/core.nwb.base.yaml | 12 +- .../linkml/core/v2_2_2/core.nwb.behavior.yaml | 32 +- .../linkml/core/v2_2_2/core.nwb.ecephys.yaml | 51 +++- .../linkml/core/v2_2_2/core.nwb.epoch.yaml | 6 + .../linkml/core/v2_2_2/core.nwb.file.yaml | 49 +++- .../linkml/core/v2_2_2/core.nwb.icephys.yaml | 29 +- .../linkml/core/v2_2_2/core.nwb.image.yaml | 26 +- .../linkml/core/v2_2_2/core.nwb.misc.yaml | 31 +- 
.../linkml/core/v2_2_2/core.nwb.ogen.yaml | 22 ++ .../linkml/core/v2_2_2/core.nwb.ophys.yaml | 248 +++++++++++++++- .../core/v2_2_2/core.nwb.retinotopy.yaml | 28 +- .../linkml/core/v2_2_4/core.nwb.base.yaml | 12 +- .../linkml/core/v2_2_4/core.nwb.behavior.yaml | 32 +- .../linkml/core/v2_2_4/core.nwb.ecephys.yaml | 51 +++- .../linkml/core/v2_2_4/core.nwb.epoch.yaml | 6 + .../linkml/core/v2_2_4/core.nwb.file.yaml | 49 +++- .../linkml/core/v2_2_4/core.nwb.icephys.yaml | 29 +- .../linkml/core/v2_2_4/core.nwb.image.yaml | 26 +- .../linkml/core/v2_2_4/core.nwb.misc.yaml | 31 +- .../linkml/core/v2_2_4/core.nwb.ogen.yaml | 22 ++ .../linkml/core/v2_2_4/core.nwb.ophys.yaml | 256 +++++++++++++++- .../core/v2_2_4/core.nwb.retinotopy.yaml | 28 +- .../linkml/core/v2_2_5/core.nwb.base.yaml | 12 +- .../linkml/core/v2_2_5/core.nwb.behavior.yaml | 32 +- .../linkml/core/v2_2_5/core.nwb.ecephys.yaml | 51 +++- .../linkml/core/v2_2_5/core.nwb.epoch.yaml | 6 + .../linkml/core/v2_2_5/core.nwb.file.yaml | 49 +++- .../linkml/core/v2_2_5/core.nwb.icephys.yaml | 29 +- .../linkml/core/v2_2_5/core.nwb.image.yaml | 26 +- .../linkml/core/v2_2_5/core.nwb.misc.yaml | 31 +- .../linkml/core/v2_2_5/core.nwb.ogen.yaml | 22 ++ .../linkml/core/v2_2_5/core.nwb.ophys.yaml | 262 ++++++++++++++++- .../core/v2_2_5/core.nwb.retinotopy.yaml | 28 +- .../linkml/core/v2_3_0/core.nwb.base.yaml | 12 +- .../linkml/core/v2_3_0/core.nwb.behavior.yaml | 32 +- .../linkml/core/v2_3_0/core.nwb.ecephys.yaml | 51 +++- .../linkml/core/v2_3_0/core.nwb.epoch.yaml | 6 + .../linkml/core/v2_3_0/core.nwb.file.yaml | 49 +++- .../linkml/core/v2_3_0/core.nwb.icephys.yaml | 29 +- .../linkml/core/v2_3_0/core.nwb.image.yaml | 37 ++- .../linkml/core/v2_3_0/core.nwb.misc.yaml | 40 ++- .../linkml/core/v2_3_0/core.nwb.ogen.yaml | 22 ++ .../linkml/core/v2_3_0/core.nwb.ophys.yaml | 262 ++++++++++++++++- .../core/v2_3_0/core.nwb.retinotopy.yaml | 28 +- .../linkml/core/v2_4_0/core.nwb.base.yaml | 12 +- .../linkml/core/v2_4_0/core.nwb.behavior.yaml | 32 +- .../linkml/core/v2_4_0/core.nwb.ecephys.yaml | 51 +++- .../linkml/core/v2_4_0/core.nwb.epoch.yaml | 6 + .../linkml/core/v2_4_0/core.nwb.file.yaml | 49 +++- .../linkml/core/v2_4_0/core.nwb.icephys.yaml | 47 ++- .../linkml/core/v2_4_0/core.nwb.image.yaml | 37 ++- .../linkml/core/v2_4_0/core.nwb.misc.yaml | 40 ++- .../linkml/core/v2_4_0/core.nwb.ogen.yaml | 22 ++ .../linkml/core/v2_4_0/core.nwb.ophys.yaml | 262 ++++++++++++++++- .../core/v2_4_0/core.nwb.retinotopy.yaml | 28 +- .../linkml/core/v2_5_0/core.nwb.base.yaml | 23 +- .../linkml/core/v2_5_0/core.nwb.behavior.yaml | 32 +- .../linkml/core/v2_5_0/core.nwb.ecephys.yaml | 51 +++- .../linkml/core/v2_5_0/core.nwb.epoch.yaml | 9 + .../linkml/core/v2_5_0/core.nwb.file.yaml | 49 +++- .../linkml/core/v2_5_0/core.nwb.icephys.yaml | 47 ++- .../linkml/core/v2_5_0/core.nwb.image.yaml | 48 ++- .../linkml/core/v2_5_0/core.nwb.misc.yaml | 40 ++- .../linkml/core/v2_5_0/core.nwb.ogen.yaml | 22 ++ .../linkml/core/v2_5_0/core.nwb.ophys.yaml | 262 ++++++++++++++++- .../core/v2_5_0/core.nwb.retinotopy.yaml | 28 +- .../core/v2_6_0_alpha/core.nwb.base.yaml | 23 +- .../core/v2_6_0_alpha/core.nwb.behavior.yaml | 32 +- .../core/v2_6_0_alpha/core.nwb.ecephys.yaml | 51 +++- .../core/v2_6_0_alpha/core.nwb.epoch.yaml | 9 + .../core/v2_6_0_alpha/core.nwb.file.yaml | 49 +++- .../core/v2_6_0_alpha/core.nwb.icephys.yaml | 47 ++- .../core/v2_6_0_alpha/core.nwb.image.yaml | 48 ++- .../core/v2_6_0_alpha/core.nwb.misc.yaml | 40 ++- .../core/v2_6_0_alpha/core.nwb.ogen.yaml | 22 ++ 
.../core/v2_6_0_alpha/core.nwb.ophys.yaml | 273 +++++++++++++++++- .../v2_6_0_alpha/core.nwb.retinotopy.yaml | 28 +- .../linkml/core/v2_7_0/core.nwb.base.yaml | 23 +- .../linkml/core/v2_7_0/core.nwb.behavior.yaml | 32 +- .../linkml/core/v2_7_0/core.nwb.ecephys.yaml | 51 +++- .../linkml/core/v2_7_0/core.nwb.epoch.yaml | 9 + .../linkml/core/v2_7_0/core.nwb.file.yaml | 49 +++- .../linkml/core/v2_7_0/core.nwb.icephys.yaml | 50 +++- .../linkml/core/v2_7_0/core.nwb.image.yaml | 48 ++- .../linkml/core/v2_7_0/core.nwb.misc.yaml | 40 ++- .../linkml/core/v2_7_0/core.nwb.ogen.yaml | 22 ++ .../linkml/core/v2_7_0/core.nwb.ophys.yaml | 273 +++++++++++++++++- .../core/v2_7_0/core.nwb.retinotopy.yaml | 28 +- .../hdmf_common/v1_1_3/hdmf-common.table.yaml | 8 +- .../hdmf_common/v1_5_0/hdmf-common.base.yaml | 4 +- .../hdmf_common/v1_5_0/hdmf-common.table.yaml | 8 +- .../hdmf_common/v1_8_0/hdmf-common.base.yaml | 4 +- .../hdmf_common/v1_8_0/hdmf-common.table.yaml | 8 +- 242 files changed, 9341 insertions(+), 1449 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py index 17d7363..7557e3c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py @@ -87,7 +87,7 @@ class Image(NWBData): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -189,7 +189,7 @@ class TimeSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], Any], NDArray[Shape["* num_times, * num_dim2"], Any], @@ -241,7 +241,7 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) - children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( + value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py index c258f24..8d5d808 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py @@ -131,7 +131,7 @@ class SpatialSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -148,7 +148,7 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[IntervalSeries]] = Field( + value: Optional[List[IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) name: str = Field(...) @@ -163,7 +163,7 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -178,7 +178,7 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -193,7 +193,7 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -208,7 +208,7 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -223,7 +223,7 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -238,7 +238,7 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) 
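The hunks above rename the generic `array` and `children` slots to `value` across the generated dataset and container classes. A minimal standalone sketch of the resulting container shape, using plain pydantic with stand-in classes (illustrative names only, not the generated API):

```python
from typing import List, Optional

from pydantic import BaseModel, Field


class IntervalSeries(BaseModel):
    """Stand-in for the generated core_nwb_behavior.IntervalSeries."""

    name: str


class BehavioralEpochs(BaseModel):
    """Grouped children now live under `value`, mirroring the hunks above."""

    name: str = Field(...)
    value: Optional[List[IntervalSeries]] = Field(None)


epochs = BehavioralEpochs(name="epochs", value=[IntervalSeries(name="running")])
assert epochs.value[0].name == "running"
```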
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py index 22d4a40..34f77fd 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py @@ -16,6 +16,7 @@ from pydantic import ( ValidationInfo, BeforeValidator, ) +from ...core.v2_2_0.core_nwb_device import Device from ...core.v2_2_0.core_nwb_base import ( TimeSeries, TimeSeriesStartingTime, @@ -116,7 +117,12 @@ class ElectricalSeries(TimeSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -179,7 +185,12 @@ class SpikeEventSeries(ElectricalSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -256,7 +267,12 @@ class FeatureExtraction(NWBDataInterface): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) @@ -287,6 +303,15 @@ class EventDetection(NWBDataInterface): description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) + source_electricalseries: Union[ElectricalSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}], + } + }, + ) class EventWaveform(NWBDataInterface): @@ -298,7 +323,7 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[SpikeEventSeries]] = Field( + value: Optional[List[SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) name: str = Field(...) @@ -313,7 +338,7 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) @@ -328,7 +353,7 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) 
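Link-typed slots such as `EventDetection.source_electricalseries` above are generated as `Union[<target>, str]` and tagged with a `source_type: link` annotation, so a field can hold either the resolved target model or an unresolved path string. A hedged sketch of that validation behavior with stand-in classes (the path below is illustrative):

```python
from typing import Union

from pydantic import BaseModel, Field


class ElectricalSeries(BaseModel):
    """Stand-in for the generated core_nwb_ecephys.ElectricalSeries."""

    name: str


class EventDetection(BaseModel):
    # a link slot accepts the resolved model or an hdf5-style path string
    source_electricalseries: Union[ElectricalSeries, str] = Field(...)


resolved = EventDetection(source_electricalseries=ElectricalSeries(name="es0"))
unresolved = EventDetection(source_electricalseries="/acquisition/es0")
assert isinstance(unresolved.source_electricalseries, str)
```

The string arm presumably exists because NWB links can point anywhere in a file and cannot always be resolved to an in-memory model at validation time.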
@@ -352,6 +377,15 @@ class ElectrodeGroup(NWBContainer): position: Optional[ElectrodeGroupPosition] = Field( None, description="""stereotaxic or common framework coordinates""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class ElectrodeGroupPosition(ConfiguredBaseModel): @@ -406,6 +440,15 @@ class ClusterWaveforms(NWBDataInterface): } }, ) + clustering_interface: Union[Clustering, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Clustering"}, {"range": "string"}], + } + }, + ) class Clustering(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py index 1cf2bb7..8627b8a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py @@ -127,7 +127,12 @@ class TimeIntervals(DynamicTable): None, description="""Index for tags.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) timeseries: Optional[TimeIntervalsTimeseries] = Field( @@ -137,7 +142,12 @@ class TimeIntervals(DynamicTable): None, description="""Index for timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py index 8acf5f9..15fdadc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py @@ -7,7 +7,6 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...core.v2_2_0.core_nwb_epoch import TimeIntervals from ...core.v2_2_0.core_nwb_misc import Units from ...core.v2_2_0.core_nwb_device import Device from ...core.v2_2_0.core_nwb_ogen import OptogeneticStimulusSite @@ -22,6 +21,7 @@ from ...core.v2_2_0.core_nwb_ecephys import ElectrodeGroup from numpydantic import NDArray, Shape from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...core.v2_2_0.core_nwb_icephys import IntracellularElectrode, SweepTable +from ...core.v2_2_0.core_nwb_epoch import TimeIntervals metamodel_version = "None" version = "2.2.0" @@ -158,19 +158,9 @@ class NWBFile(NWBContainer): ..., description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. 
General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""", ) - intervals: Optional[List[TimeIntervals]] = Field( + intervals: Optional[NWBFileIntervals] = Field( None, description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", - json_schema_extra={ - "linkml_meta": { - "any_of": [ - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - ] - } - }, ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") @@ -544,6 +534,35 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): ) +class NWBFileIntervals(ConfiguredBaseModel): + """ + Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"}) + + name: Literal["intervals"] = Field( + "intervals", + json_schema_extra={ + "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"} + }, + ) + epochs: Optional[TimeIntervals] = Field( + None, + description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""", + ) + trials: Optional[TimeIntervals] = Field( + None, description="""Repeated experimental events that have a logical grouping.""" + ) + invalid_times: Optional[TimeIntervals] = Field( + None, description="""Time intervals that should be removed from analysis.""" + ) + time_intervals: Optional[List[TimeIntervals]] = Field( + None, + description="""Optional additional table(s) for describing other experimental time intervals.""", + ) + + # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model NWBFile.model_rebuild() @@ -554,3 +573,4 @@ Subject.model_rebuild() GeneralExtracellularEphys.model_rebuild() ExtracellularEphysElectrodes.model_rebuild() GeneralIntracellularEphys.model_rebuild() +NWBFileIntervals.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py index 8d6da27..1242dfc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py @@ -11,6 +11,7 @@ from ...core.v2_2_0.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_2_0.core_nwb_device import Device from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar from pydantic import ( BaseModel, @@ -117,6 +118,15 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -164,7 +174,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -195,6 +205,15 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -271,6 +290,15 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -324,6 +352,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -416,6 +453,15 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -648,6 +694,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -724,6 +779,15 @@ class IntracellularElectrode(NWBContainer): slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class 
SweepTable(DynamicTable): @@ -752,7 +816,12 @@ class SweepTable(DynamicTable): ..., description="""Index for series.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py index db8401b..8f16e8e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py @@ -75,7 +75,7 @@ class GrayscaleImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -98,7 +98,7 @@ class RGBImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -121,7 +121,7 @@ class RGBAImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -208,7 +208,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) - array: Optional[NDArray[Shape["* num_files"], str]] = Field( + value: Optional[NDArray[Shape["* num_files"], str]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}} ) @@ -223,6 +223,15 @@ class ImageMaskSeries(ImageSeries): ) name: str = Field(...) 
+ masked_imageseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) data: Optional[ Union[ NDArray[Shape["* frame, * x, * y"], float], @@ -362,6 +371,15 @@ class IndexSeries(TimeSeries): description="""Index of the frame in the referenced ImageSeries.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + indexed_timeseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py index 968660d..9c79866 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py @@ -167,7 +167,7 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -287,6 +287,15 @@ class DecompositionSeries(TimeSeries): ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) + source_timeseries: Optional[Union[TimeSeries, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "TimeSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -334,7 +343,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -428,7 +437,12 @@ class Units(DynamicTable): None, description="""Index into the spike_times dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) spike_times: Optional[UnitsSpikeTimes] = Field( @@ -438,7 +452,12 @@ class Units(DynamicTable): None, description="""Index into the obs_intervals dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( @@ -459,14 +478,24 @@ class Units(DynamicTable): None, description="""Index into electrodes.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrodes: Named[Optional[DynamicTableRegion]] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrode_group: Optional[List[ElectrodeGroup]] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py index 02043ad..f75e675 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py @@ -14,6 +14,7 @@ from ...core.v2_2_0.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_2_0.core_nwb_device import Device metamodel_version = "None" version = "2.2.0" @@ -81,6 +82,15 @@ class OptogeneticSeries(TimeSeries): description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + site: Union[OptogeneticStimulusSite, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -129,6 +139,15 @@ class OptogeneticStimulusSite(NWBContainer): ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py index 44b5845..062517f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py @@ -17,6 +17,7 @@ from pydantic import ( BeforeValidator, ) from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTableRegion, DynamicTable +from ...core.v2_2_0.core_nwb_device import Device from numpydantic import NDArray, Shape from ...core.v2_2_0.core_nwb_base import ( TimeSeriesStartingTime, @@ -117,6 +118,15 @@ class TwoPhotonSeries(ImageSeries): field_of_view: Optional[ Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) data: Optional[ Union[ NDArray[Shape["* frame, * x, * y"], float], @@ -185,7 +195,12 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) description: Optional[str] = Field(None, description="""Description of the time series.""") @@ -229,7 +244,7 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -244,7 +259,7 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -259,7 +274,7 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[DynamicTable]] = Field( + value: Optional[List[DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) name: str = Field(...) 
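Slots tagged with the `named` annotation in the hunks above are wrapped in the `Named[...]` type from `nwb_linkml.includes.types`, which pins the child's `name` to the slot name during validation. A rough approximation of that mechanism, assuming plain pydantic v2 (the real helper differs in details):

```python
from typing import Annotated, Any, TypeVar

from pydantic import BaseModel, BeforeValidator, ValidationInfo

T = TypeVar("T")


def _get_name(value: Any, info: ValidationInfo) -> Any:
    # approximate behavior: pin the child's `name` to the parent slot name
    if isinstance(value, dict):
        value = {**value, "name": info.field_name}
    return value


Named = Annotated[T, BeforeValidator(_get_name)]


class DynamicTableRegion(BaseModel):
    """Stand-in for the generated hdmf_common_table.DynamicTableRegion."""

    name: str


class RoiResponseSeries(BaseModel):
    rois: Named[DynamicTableRegion]


series = RoiResponseSeries(rois={})
assert series.rois.name == "rois"
```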
@@ -302,6 +317,15 @@ class ImagingPlane(NWBContainer): optical_channel: OpticalChannel = Field( ..., description="""An optical channel used to record from an imaging plane.""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class ImagingPlaneManifold(ConfiguredBaseModel): @@ -325,7 +349,7 @@ class ImagingPlaneManifold(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. The default value is 'meters'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* height, * width, 3 x_y_z"], float], NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float], @@ -349,7 +373,7 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Measurement units for origin_coords. The default value is 'meters'.""" ) - array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( + value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -380,7 +404,7 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" ) - array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( + value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -416,7 +440,7 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[NWBDataInterface]] = Field( + value: Optional[List[NWBDataInterface]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}]}} ) name: str = Field(...) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py index 7155a0fc..dcb153d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py @@ -101,7 +101,7 @@ class RetinotopyMap(NWBData): description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -122,7 +122,7 @@ class AxisMap(RetinotopyMap): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -161,7 +161,7 @@ class RetinotopyImage(GrayscaleImage): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -187,35 +187,60 @@ class ImagingRetinotopy(NWBDataInterface): ..., description="""Phase response to stimulus on the first measured axis.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) axis_1_power_map: Named[Optional[AxisMap]] = Field( None, description="""Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) axis_2_phase_map: Named[AxisMap] = Field( ..., description="""Phase response to stimulus on the second measured axis.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) axis_2_power_map: Named[Optional[AxisMap]] = Field( None, description="""Power response to stimulus on the second measured axis.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) sign_map: Named[RetinotopyMap] = Field( ..., description="""Sine of the angle between the direction of the gradient in axis_1 and axis_2.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) axis_descriptions: NDArray[Shape["2 num_axes"], str] = Field( @@ -235,7 +260,12 @@ class ImagingRetinotopy(NWBDataInterface): ..., description="""Gray-scale anatomical image of cortical surface. 
Array structure: [rows][columns]""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) @@ -273,7 +303,7 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py index 6ee8d4e..0456ea1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py @@ -133,6 +133,7 @@ from ...core.v2_2_0.core_nwb_file import ( GeneralExtracellularEphys, ExtracellularEphysElectrodes, GeneralIntracellularEphys, + NWBFileIntervals, ) from ...core.v2_2_0.core_nwb_epoch import TimeIntervals, TimeIntervalsTimeseries diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py index c37673c..228d72f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py @@ -87,7 +87,7 @@ class Image(NWBData): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -189,7 +189,7 @@ class TimeSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], Any], NDArray[Shape["* num_times, * num_dim2"], Any], @@ -241,7 +241,7 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) - children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( + value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py index 5020dfe..25d55da 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py @@ -131,7 +131,7 @@ class SpatialSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -148,7 +148,7 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[IntervalSeries]] = Field( + value: Optional[List[IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) name: str = Field(...) @@ -163,7 +163,7 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -178,7 +178,7 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -193,7 +193,7 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -208,7 +208,7 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -223,7 +223,7 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -238,7 +238,7 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) 
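In the `core_nwb_file.py` hunks above, the flat `intervals: List[TimeIntervals]` slot (with its redundant repeated `any_of` ranges) is replaced by a dedicated `NWBFileIntervals` group with fixed-name subgroups. A minimal sketch of the new shape, again with stand-in classes rather than the generated API:

```python
from typing import List, Literal, Optional

from pydantic import BaseModel


class TimeIntervals(BaseModel):
    """Stand-in for the generated core_nwb_epoch.TimeIntervals table."""

    name: str


class NWBFileIntervals(BaseModel):
    # fixed-name subgroups replace the old flat list-of-TimeIntervals slot
    name: Literal["intervals"] = "intervals"
    epochs: Optional[TimeIntervals] = None
    trials: Optional[TimeIntervals] = None
    invalid_times: Optional[TimeIntervals] = None
    time_intervals: Optional[List[TimeIntervals]] = None


intervals = NWBFileIntervals(
    trials=TimeIntervals(name="trials"),
    time_intervals=[TimeIntervals(name="reaches")],
)
assert intervals.trials.name == "trials"
```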
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py index d8ed535..eaee082 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py @@ -16,6 +16,7 @@ from pydantic import ( ValidationInfo, BeforeValidator, ) +from ...core.v2_2_1.core_nwb_device import Device from ...core.v2_2_1.core_nwb_base import ( TimeSeries, TimeSeriesStartingTime, @@ -116,7 +117,12 @@ class ElectricalSeries(TimeSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -179,7 +185,12 @@ class SpikeEventSeries(ElectricalSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -256,7 +267,12 @@ class FeatureExtraction(NWBDataInterface): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) @@ -287,6 +303,15 @@ class EventDetection(NWBDataInterface): description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) + source_electricalseries: Union[ElectricalSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}], + } + }, + ) class EventWaveform(NWBDataInterface): @@ -298,7 +323,7 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[SpikeEventSeries]] = Field( + value: Optional[List[SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) name: str = Field(...) @@ -313,7 +338,7 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) @@ -328,7 +353,7 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) 
@@ -352,6 +377,15 @@ class ElectrodeGroup(NWBContainer): position: Optional[ElectrodeGroupPosition] = Field( None, description="""stereotaxic or common framework coordinates""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class ElectrodeGroupPosition(ConfiguredBaseModel): @@ -406,6 +440,15 @@ class ClusterWaveforms(NWBDataInterface): } }, ) + clustering_interface: Union[Clustering, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Clustering"}, {"range": "string"}], + } + }, + ) class Clustering(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py index 0113b4d..b80640c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py @@ -127,7 +127,12 @@ class TimeIntervals(DynamicTable): None, description="""Index for tags.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) timeseries: Optional[TimeIntervalsTimeseries] = Field( @@ -137,7 +142,12 @@ class TimeIntervals(DynamicTable): None, description="""Index for timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py index f2eb79e..092eaf4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py @@ -7,7 +7,6 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...core.v2_2_1.core_nwb_epoch import TimeIntervals from ...core.v2_2_1.core_nwb_misc import Units from ...core.v2_2_1.core_nwb_device import Device from ...core.v2_2_1.core_nwb_ogen import OptogeneticStimulusSite @@ -22,6 +21,7 @@ from ...core.v2_2_1.core_nwb_ecephys import ElectrodeGroup from numpydantic import NDArray, Shape from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...core.v2_2_1.core_nwb_icephys import IntracellularElectrode, SweepTable +from ...core.v2_2_1.core_nwb_epoch import TimeIntervals metamodel_version = "None" version = "2.2.1" @@ -158,19 +158,9 @@ class NWBFile(NWBContainer): ..., description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. 
General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""", ) - intervals: Optional[List[TimeIntervals]] = Field( + intervals: Optional[NWBFileIntervals] = Field( None, description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", - json_schema_extra={ - "linkml_meta": { - "any_of": [ - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - ] - } - }, ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") @@ -544,6 +534,35 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): ) +class NWBFileIntervals(ConfiguredBaseModel): + """ + Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"}) + + name: Literal["intervals"] = Field( + "intervals", + json_schema_extra={ + "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"} + }, + ) + epochs: Optional[TimeIntervals] = Field( + None, + description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""", + ) + trials: Optional[TimeIntervals] = Field( + None, description="""Repeated experimental events that have a logical grouping.""" + ) + invalid_times: Optional[TimeIntervals] = Field( + None, description="""Time intervals that should be removed from analysis.""" + ) + time_intervals: Optional[List[TimeIntervals]] = Field( + None, + description="""Optional additional table(s) for describing other experimental time intervals.""", + ) + + # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model NWBFile.model_rebuild() @@ -554,3 +573,4 @@ Subject.model_rebuild() GeneralExtracellularEphys.model_rebuild() ExtracellularEphysElectrodes.model_rebuild() GeneralIntracellularEphys.model_rebuild() +NWBFileIntervals.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py index 3cea17f..0109b14 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py @@ -11,6 +11,7 @@ from ...core.v2_2_1.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_2_1.core_nwb_device import Device from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar from pydantic import ( BaseModel, @@ -117,6 +118,15 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -164,7 +174,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -195,6 +205,15 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -271,6 +290,15 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -324,6 +352,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -416,6 +453,15 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -648,6 +694,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -724,6 +779,15 @@ class IntracellularElectrode(NWBContainer): slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class 
SweepTable(DynamicTable): @@ -752,7 +816,12 @@ class SweepTable(DynamicTable): ..., description="""Index for series.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py index 89e4978..e831ea1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py @@ -75,7 +75,7 @@ class GrayscaleImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -98,7 +98,7 @@ class RGBImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -121,7 +121,7 @@ class RGBAImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -208,7 +208,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) - array: Optional[NDArray[Shape["* num_files"], str]] = Field( + value: Optional[NDArray[Shape["* num_files"], str]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}} ) @@ -223,6 +223,15 @@ class ImageMaskSeries(ImageSeries): ) name: str = Field(...) 
+ masked_imageseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) data: Optional[ Union[ NDArray[Shape["* frame, * x, * y"], float], @@ -362,6 +371,15 @@ class IndexSeries(TimeSeries): description="""Index of the frame in the referenced ImageSeries.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + indexed_timeseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py index 5d1881f..19c418d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py @@ -167,7 +167,7 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -287,6 +287,15 @@ class DecompositionSeries(TimeSeries): ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) + source_timeseries: Optional[Union[TimeSeries, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "TimeSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -334,7 +343,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -428,7 +437,12 @@ class Units(DynamicTable): None, description="""Index into the spike_times dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) spike_times: Optional[UnitsSpikeTimes] = Field( @@ -438,7 +452,12 @@ class Units(DynamicTable): None, description="""Index into the obs_intervals dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( @@ -459,14 +478,24 @@ class Units(DynamicTable): None, description="""Index into electrodes.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrodes: Named[Optional[DynamicTableRegion]] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrode_group: Optional[List[ElectrodeGroup]] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py index 26c3fc8..37ff5c3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py @@ -14,6 +14,7 @@ from ...core.v2_2_1.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_2_1.core_nwb_device import Device metamodel_version = "None" version = "2.2.1" @@ -81,6 +82,15 @@ class OptogeneticSeries(TimeSeries): description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + site: Union[OptogeneticStimulusSite, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -129,6 +139,15 @@ class OptogeneticStimulusSite(NWBContainer): ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py index 2ed7469..ba8d78a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py @@ -17,6 +17,7 @@ from pydantic import ( BeforeValidator, ) from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTableRegion, DynamicTable +from ...core.v2_2_1.core_nwb_device import Device from numpydantic import NDArray, Shape from ...core.v2_2_1.core_nwb_base import ( TimeSeriesStartingTime, @@ -117,6 +118,15 @@ class TwoPhotonSeries(ImageSeries): field_of_view: Optional[ Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) data: Optional[ Union[ NDArray[Shape["* frame, * x, * y"], float], @@ -185,7 +195,12 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) description: Optional[str] = Field(None, description="""Description of the time series.""") @@ -229,7 +244,7 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -244,7 +259,7 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -259,7 +274,7 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[DynamicTable]] = Field( + value: Optional[List[DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) name: str = Field(...) 
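Tying back to the `core_nwb_file.py` hunk above: `NWBFile.intervals` is no longer a bare `List[TimeIntervals]` with a four-times-repeated `any_of`, but a dedicated `NWBFileIntervals` group with named `epochs`/`trials`/`invalid_times` slots. A sketch of the resulting access pattern, with the `TimeIntervals` columns elided via `model_construct`:

```python
# Sketch: named interval tables hang off an `intervals` group instead of
# an untyped list of TimeIntervals.
from nwb_linkml.models.pydantic.core.v2_2_1.core_nwb_epoch import TimeIntervals
from nwb_linkml.models.pydantic.core.v2_2_1.core_nwb_file import NWBFileIntervals

intervals = NWBFileIntervals(
    trials=TimeIntervals.model_construct(name="trials"),
)
assert intervals.name == "intervals"  # fixed by equals_string / ifabsent
assert intervals.trials.name == "trials"
assert intervals.epochs is None  # the other named tables stay optional
```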
@@ -302,6 +317,15 @@ class ImagingPlane(NWBContainer): optical_channel: OpticalChannel = Field( ..., description="""An optical channel used to record from an imaging plane.""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class ImagingPlaneManifold(ConfiguredBaseModel): @@ -325,7 +349,7 @@ class ImagingPlaneManifold(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. The default value is 'meters'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* height, * width, 3 x_y_z"], float], NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float], @@ -349,7 +373,7 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Measurement units for origin_coords. The default value is 'meters'.""" ) - array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( + value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -380,7 +404,7 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" ) - array: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( + value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -416,7 +440,7 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[NWBDataInterface]] = Field( + value: Optional[List[NWBDataInterface]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}]}} ) name: str = Field(...) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py index 7095d7e..98f0ede 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py @@ -101,7 +101,7 @@ class RetinotopyMap(NWBData): description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -122,7 +122,7 @@ class AxisMap(RetinotopyMap): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -161,7 +161,7 @@ class RetinotopyImage(GrayscaleImage): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -187,35 +187,60 @@ class ImagingRetinotopy(NWBDataInterface): ..., description="""Phase response to stimulus on the first measured axis.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) axis_1_power_map: Named[Optional[AxisMap]] = Field( None, description="""Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) axis_2_phase_map: Named[AxisMap] = Field( ..., description="""Phase response to stimulus on the second measured axis.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) axis_2_power_map: Named[Optional[AxisMap]] = Field( None, description="""Power response to stimulus on the second measured axis.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) sign_map: Named[RetinotopyMap] = Field( ..., description="""Sine of the angle between the direction of the gradient in axis_1 and axis_2.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) axis_descriptions: NDArray[Shape["2 num_axes"], str] = Field( @@ -235,7 +260,12 @@ class ImagingRetinotopy(NWBDataInterface): ..., description="""Gray-scale anatomical image of cortical surface. 
Array structure: [rows][columns]""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) @@ -273,7 +303,7 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py index 5ef5e82..c5d3cba 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py @@ -133,6 +133,7 @@ from ...core.v2_2_1.core_nwb_file import ( GeneralExtracellularEphys, ExtracellularEphysElectrodes, GeneralIntracellularEphys, + NWBFileIntervals, ) from ...core.v2_2_1.core_nwb_epoch import TimeIntervals, TimeIntervalsTimeseries diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py index 0b34abd..14eaf16 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py @@ -87,7 +87,7 @@ class Image(NWBData): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -189,7 +189,7 @@ class TimeSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], Any], NDArray[Shape["* num_times, * num_dim2"], Any], @@ -241,7 +241,7 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) - children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( + value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py index 3b1a5fb..d19da8f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py @@ -131,7 +131,7 @@ class SpatialSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -148,7 +148,7 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[IntervalSeries]] = Field( + value: Optional[List[IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) name: str = Field(...) @@ -163,7 +163,7 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -178,7 +178,7 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -193,7 +193,7 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -208,7 +208,7 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -223,7 +223,7 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -238,7 +238,7 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) 
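The `array` -> `value` rename also applies to dataset classes such as `TimeSeriesData` in the `core_nwb_base.py` hunk above. A small sketch, assuming numpydantic accepts a plain numpy array for the `NDArray` slot (as the generated annotations suggest):

```python
# Sketch: the dataset payload moves from `array` to `value`; numpydantic
# validates the numpy array against the NDArray shape annotation.
import numpy as np

from nwb_linkml.models.pydantic.core.v2_2_2.core_nwb_base import TimeSeriesData

data = TimeSeriesData(unit="volts", value=np.arange(10.0))  # was array=...
assert data.value.shape == (10,)
```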
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py index ac58cf4..06bad97 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py @@ -16,6 +16,7 @@ from pydantic import ( ValidationInfo, BeforeValidator, ) +from ...core.v2_2_2.core_nwb_device import Device from ...core.v2_2_2.core_nwb_base import ( TimeSeries, TimeSeriesStartingTime, @@ -116,7 +117,12 @@ class ElectricalSeries(TimeSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -179,7 +185,12 @@ class SpikeEventSeries(ElectricalSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -256,7 +267,12 @@ class FeatureExtraction(NWBDataInterface): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) @@ -287,6 +303,15 @@ class EventDetection(NWBDataInterface): description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) + source_electricalseries: Union[ElectricalSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}], + } + }, + ) class EventWaveform(NWBDataInterface): @@ -298,7 +323,7 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[SpikeEventSeries]] = Field( + value: Optional[List[SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) name: str = Field(...) @@ -313,7 +338,7 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) @@ -328,7 +353,7 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) 
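The new `source_type` annotation added throughout these hunks rides along in `json_schema_extra` rather than in the type itself, so it is inert at validation time but inspectable at runtime. A sketch using pydantic v2's `model_fields` API, which these generated models already rely on:

```python
# Sketch: source_type lives in json_schema_extra and can be read back
# from the generated field metadata without affecting validation.
from nwb_linkml.models.pydantic.core.v2_2_2.core_nwb_ecephys import ElectricalSeries

extra = ElectricalSeries.model_fields["electrodes"].json_schema_extra
annotations = extra["linkml_meta"]["annotations"]
assert annotations["named"]["value"] is True
assert annotations["source_type"]["value"] == "neurodata_type_inc"
```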
@@ -352,6 +377,15 @@ class ElectrodeGroup(NWBContainer): position: Optional[ElectrodeGroupPosition] = Field( None, description="""stereotaxic or common framework coordinates""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class ElectrodeGroupPosition(ConfiguredBaseModel): @@ -406,6 +440,15 @@ class ClusterWaveforms(NWBDataInterface): } }, ) + clustering_interface: Union[Clustering, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Clustering"}, {"range": "string"}], + } + }, + ) class Clustering(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py index 31d8da0..11b17cf 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py @@ -127,7 +127,12 @@ class TimeIntervals(DynamicTable): None, description="""Index for tags.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) timeseries: Optional[TimeIntervalsTimeseries] = Field( @@ -137,7 +142,12 @@ class TimeIntervals(DynamicTable): None, description="""Index for timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -187,7 +197,7 @@ class TimeIntervalsTimeseries(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py index 4479d53..128b7a4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py @@ -7,7 +7,6 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...core.v2_2_2.core_nwb_epoch import TimeIntervals from ...core.v2_2_2.core_nwb_misc import Units from ...core.v2_2_2.core_nwb_device import Device from ...core.v2_2_2.core_nwb_ogen import OptogeneticStimulusSite @@ -22,6 +21,7 @@ from ...core.v2_2_2.core_nwb_ecephys import ElectrodeGroup from numpydantic import NDArray, Shape from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex from ...core.v2_2_2.core_nwb_icephys import IntracellularElectrode, SweepTable +from ...core.v2_2_2.core_nwb_epoch import TimeIntervals metamodel_version = "None" version = "2.2.2" @@ -158,19 +158,9 @@ class NWBFile(NWBContainer): ..., description="""Experimental metadata, including protocol, notes and description of hardware 
device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""", ) - intervals: Optional[List[TimeIntervals]] = Field( + intervals: Optional[NWBFileIntervals] = Field( None, description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", - json_schema_extra={ - "linkml_meta": { - "any_of": [ - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - ] - } - }, ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") @@ -544,6 +534,35 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): ) +class NWBFileIntervals(ConfiguredBaseModel): + """ + Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"}) + + name: Literal["intervals"] = Field( + "intervals", + json_schema_extra={ + "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"} + }, + ) + epochs: Optional[TimeIntervals] = Field( + None, + description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""", + ) + trials: Optional[TimeIntervals] = Field( + None, description="""Repeated experimental events that have a logical grouping.""" + ) + invalid_times: Optional[TimeIntervals] = Field( + None, description="""Time intervals that should be removed from analysis.""" + ) + time_intervals: Optional[List[TimeIntervals]] = Field( + None, + description="""Optional additional table(s) for describing other experimental time intervals.""", + ) + + # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model NWBFile.model_rebuild() @@ -554,3 +573,4 @@ Subject.model_rebuild() GeneralExtracellularEphys.model_rebuild() ExtracellularEphysElectrodes.model_rebuild() GeneralIntracellularEphys.model_rebuild() +NWBFileIntervals.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py index c34073a..d48fcbd 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py @@ -11,6 +11,7 @@ from ...core.v2_2_2.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_2_2.core_nwb_device import Device from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar from pydantic import ( BaseModel, @@ -117,6 +118,15 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -164,7 +174,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -195,6 +205,15 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -271,6 +290,15 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -324,6 +352,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -416,6 +453,15 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -648,6 +694,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -724,6 +779,15 @@ class IntracellularElectrode(NWBContainer): slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class 
SweepTable(DynamicTable): @@ -752,7 +816,12 @@ class SweepTable(DynamicTable): ..., description="""Index for series.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py index 92928c5..b161444 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py @@ -75,7 +75,7 @@ class GrayscaleImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -98,7 +98,7 @@ class RGBImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -121,7 +121,7 @@ class RGBAImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -208,7 +208,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) - array: Optional[NDArray[Shape["* num_files"], str]] = Field( + value: Optional[NDArray[Shape["* num_files"], str]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}} ) @@ -223,6 +223,15 @@ class ImageMaskSeries(ImageSeries): ) name: str = Field(...) 
+ masked_imageseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) data: Optional[ Union[ NDArray[Shape["* frame, * x, * y"], float], @@ -360,6 +369,15 @@ class IndexSeries(TimeSeries): description="""Index of the frame in the referenced ImageSeries.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + indexed_timeseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py index 5f1e06a..cfdfb58 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py @@ -167,7 +167,7 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -287,6 +287,15 @@ class DecompositionSeries(TimeSeries): ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) + source_timeseries: Optional[Union[TimeSeries, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "TimeSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -334,7 +343,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -428,7 +437,12 @@ class Units(DynamicTable): None, description="""Index into the spike_times dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) spike_times: Optional[UnitsSpikeTimes] = Field( @@ -438,7 +452,12 @@ class Units(DynamicTable): None, description="""Index into the obs_intervals dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( @@ -459,14 +478,24 @@ class Units(DynamicTable): None, description="""Index into electrodes.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrodes: Named[Optional[DynamicTableRegion]] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrode_group: Optional[List[ElectrodeGroup]] = Field( @@ -524,7 +553,7 @@ class UnitsSpikeTimes(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py index b34d130..2b0d4f5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py @@ -14,6 +14,7 @@ from ...core.v2_2_2.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_2_2.core_nwb_device import Device metamodel_version = "None" version = "2.2.2" @@ -81,6 +82,15 @@ class OptogeneticSeries(TimeSeries): description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + site: Union[OptogeneticStimulusSite, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -129,6 +139,15 @@ class OptogeneticStimulusSite(NWBContainer): ..., description="""Location of the 
stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py index 3fe9760..e4c1979 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py @@ -16,8 +16,9 @@ from pydantic import ( ValidationInfo, BeforeValidator, ) -from numpydantic import NDArray, Shape from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTableRegion, DynamicTable +from ...core.v2_2_2.core_nwb_device import Device +from numpydantic import NDArray, Shape from ...core.v2_2_2.core_nwb_base import ( TimeSeriesStartingTime, TimeSeriesSync, @@ -117,6 +118,15 @@ class TwoPhotonSeries(ImageSeries): field_of_view: Optional[ Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) data: Optional[ Union[ NDArray[Shape["* frame, * x, * y"], float], @@ -185,7 +195,12 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) description: Optional[str] = Field(None, description="""Description of the time series.""") @@ -229,7 +244,7 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -244,7 +259,7 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -259,7 +274,7 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[DynamicTable]] = Field( + value: Optional[List[DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) name: str = Field(...) @@ -274,10 +289,146 @@ class ImagingPlane(NWBContainer): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[NWBContainer]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "NWBContainer"}]}} - ) name: str = Field(...) 
+ description: Optional[str] = Field(None, description="""Description of the imaging plane.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + imaging_rate: float = Field(..., description="""Rate that images are acquired, in Hz.""") + indicator: str = Field(..., description="""Calcium indicator.""") + location: str = Field( + ..., + description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", + ) + manifold: Optional[ImagingPlaneManifold] = Field( + None, + description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""", + ) + origin_coords: Optional[ImagingPlaneOriginCoords] = Field( + None, + description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""", + ) + grid_spacing: Optional[ImagingPlaneGridSpacing] = Field( + None, + description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""", + ) + reference_frame: Optional[str] = Field( + None, + description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""", + ) + optical_channel: List[OpticalChannel] = Field( + ..., description="""An optical channel used to record from an imaging plane.""" + ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) + + +class ImagingPlaneManifold(ConfiguredBaseModel): + """ + DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["manifold"] = Field( + "manifold", + json_schema_extra={ + "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"} + }, + ) + conversion: Optional[float] = Field( + None, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + ) + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. The default value is 'meters'.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* height, * width, 3 x_y_z"], float], + NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float], + ] + ] = Field(None) + + +class ImagingPlaneOriginCoords(ConfiguredBaseModel): + """ + Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["origin_coords"] = Field( + "origin_coords", + json_schema_extra={ + "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + ) + value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "x_y", "exact_cardinality": 2}, + {"alias": "x_y_z", "exact_cardinality": 3}, + ] + } + } + }, + ) + + +class ImagingPlaneGridSpacing(ConfiguredBaseModel): + """ + Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["grid_spacing"] = Field( + "grid_spacing", + json_schema_extra={ + "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + ) + value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "x_y", "exact_cardinality": 2}, + {"alias": "x_y_z", "exact_cardinality": 3}, + ] + } + } + }, + ) + + +class OpticalChannel(NWBContainer): + """ + An optical channel used to record from an imaging plane. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: str = Field(...) 
+ description: str = Field(..., description="""Description or other notes about the channel.""") + emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") class MotionCorrection(NWBDataInterface): @@ -289,7 +440,7 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[NWBDataInterface]] = Field( + value: Optional[List[NWBDataInterface]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "NWBDataInterface"}]}} ) name: str = Field(...) @@ -303,4 +454,8 @@ DfOverF.model_rebuild() Fluorescence.model_rebuild() ImageSegmentation.model_rebuild() ImagingPlane.model_rebuild() +ImagingPlaneManifold.model_rebuild() +ImagingPlaneOriginCoords.model_rebuild() +ImagingPlaneGridSpacing.model_rebuild() +OpticalChannel.model_rebuild() MotionCorrection.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py index 8ba1ae3..c2d0c5f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py @@ -135,7 +135,7 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -167,7 +167,7 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -199,7 +199,7 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -231,7 +231,7 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -268,7 +268,7 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. 
Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -294,7 +294,7 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -330,7 +330,7 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py index 8a011e2..7b96622 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py @@ -52,6 +52,10 @@ from ...core.v2_2_2.core_nwb_ophys import ( Fluorescence, ImageSegmentation, ImagingPlane, + ImagingPlaneManifold, + ImagingPlaneOriginCoords, + ImagingPlaneGridSpacing, + OpticalChannel, MotionCorrection, ) from ...core.v2_2_2.core_nwb_device import Device @@ -132,6 +136,7 @@ from ...core.v2_2_2.core_nwb_file import ( GeneralExtracellularEphys, ExtracellularEphysElectrodes, GeneralIntracellularEphys, + NWBFileIntervals, ) from ...core.v2_2_2.core_nwb_epoch import TimeIntervals, TimeIntervalsTimeseries diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py index 8b60948..bb4ef4e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py @@ -87,7 +87,7 @@ class Image(NWBData): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -189,7 +189,7 @@ class TimeSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], Any], NDArray[Shape["* num_times, * num_dim2"], Any], @@ -241,7 +241,7 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) - children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( + value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py index 6bd2d26..093f0b8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py @@ -131,7 +131,7 @@ class SpatialSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -148,7 +148,7 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[IntervalSeries]] = Field( + value: Optional[List[IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) name: str = Field(...) @@ -163,7 +163,7 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -178,7 +178,7 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -193,7 +193,7 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -208,7 +208,7 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -223,7 +223,7 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) 
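[review note] The hunks above, and the remaining `NWBDataInterface` hunks below, all apply the same rename, `children` -> `value`, for group classes that hold an anonymous collection of contained objects. A minimal sketch of the resulting shape, using stub classes rather than the generated models:

```python
# Sketch only: `SpatialSeries` is a stub stand-in for the generated class.
from typing import List, Optional
from pydantic import BaseModel, Field

class SpatialSeries(BaseModel):
    name: str

class Position(BaseModel):
    name: str = Field(...)
    # contained series are now collected under `value` rather than `children`
    value: Optional[List[SpatialSeries]] = Field(None)

pos = Position(name="Position", value=[SpatialSeries(name="head_position")])
assert pos.value[0].name == "head_position"
```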
@@ -238,7 +238,7 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py index 8e8528b..0138a95 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py @@ -16,6 +16,7 @@ from pydantic import ( ValidationInfo, BeforeValidator, ) +from ...core.v2_2_4.core_nwb_device import Device from ...core.v2_2_4.core_nwb_base import ( TimeSeries, TimeSeriesStartingTime, @@ -116,7 +117,12 @@ class ElectricalSeries(TimeSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -179,7 +185,12 @@ class SpikeEventSeries(ElectricalSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -256,7 +267,12 @@ class FeatureExtraction(NWBDataInterface): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) @@ -287,6 +303,15 @@ class EventDetection(NWBDataInterface): description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) + source_electricalseries: Union[ElectricalSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}], + } + }, + ) class EventWaveform(NWBDataInterface): @@ -298,7 +323,7 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[SpikeEventSeries]] = Field( + value: Optional[List[SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) name: str = Field(...) @@ -313,7 +338,7 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) 
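[review note] The added `source_electricalseries`-style fields above follow a single pattern for links: the slot accepts either the target model or a string reference, with the `source_type: link` annotation recording the slot's origin in the schema. A hedged sketch with stub classes; the path string below is illustrative, not taken from the patch:

```python
# Sketch only: `ElectricalSeries` is a stub, and the reference path is made up.
from typing import Union
from pydantic import BaseModel, Field

class ElectricalSeries(BaseModel):
    name: str

class EventDetection(BaseModel):
    # a link slot validates either the linked object itself...
    source_electricalseries: Union[ElectricalSeries, str] = Field(...)

by_object = EventDetection(source_electricalseries=ElectricalSeries(name="eseries"))
# ...or a string reference to it
by_reference = EventDetection(source_electricalseries="/acquisition/eseries")
```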
@@ -328,7 +353,7 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) @@ -352,6 +377,15 @@ class ElectrodeGroup(NWBContainer): position: Optional[ElectrodeGroupPosition] = Field( None, description="""stereotaxic or common framework coordinates""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class ElectrodeGroupPosition(ConfiguredBaseModel): @@ -406,6 +440,15 @@ class ClusterWaveforms(NWBDataInterface): } }, ) + clustering_interface: Union[Clustering, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Clustering"}, {"range": "string"}], + } + }, + ) class Clustering(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py index 4f1a792..60ec786 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py @@ -127,7 +127,12 @@ class TimeIntervals(DynamicTable): None, description="""Index for tags.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) timeseries: Optional[TimeIntervalsTimeseries] = Field( @@ -137,7 +142,12 @@ class TimeIntervals(DynamicTable): None, description="""Index for timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -187,7 +197,7 @@ class TimeIntervalsTimeseries(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py index 22f8f82..0b716c0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py @@ -7,7 +7,6 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...core.v2_2_4.core_nwb_epoch import TimeIntervals from ...core.v2_2_4.core_nwb_misc import Units from ...core.v2_2_4.core_nwb_device import Device from ...core.v2_2_4.core_nwb_ogen import OptogeneticStimulusSite @@ -16,6 +15,7 @@ from ...core.v2_2_4.core_nwb_ecephys import ElectrodeGroup from numpydantic import NDArray, Shape from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex from 
...core.v2_2_4.core_nwb_icephys import IntracellularElectrode, SweepTable +from ...core.v2_2_4.core_nwb_epoch import TimeIntervals from ...core.v2_2_4.core_nwb_base import ( NWBData, NWBContainer, @@ -174,19 +174,9 @@ class NWBFile(NWBContainer): ..., description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""", ) - intervals: Optional[List[TimeIntervals]] = Field( + intervals: Optional[NWBFileIntervals] = Field( None, description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", - json_schema_extra={ - "linkml_meta": { - "any_of": [ - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - ] - } - }, ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") @@ -522,6 +512,35 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): ) +class NWBFileIntervals(ConfiguredBaseModel): + """ + Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"}) + + name: Literal["intervals"] = Field( + "intervals", + json_schema_extra={ + "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"} + }, + ) + epochs: Optional[TimeIntervals] = Field( + None, + description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""", + ) + trials: Optional[TimeIntervals] = Field( + None, description="""Repeated experimental events that have a logical grouping.""" + ) + invalid_times: Optional[TimeIntervals] = Field( + None, description="""Time intervals that should be removed from analysis.""" + ) + time_intervals: Optional[List[TimeIntervals]] = Field( + None, + description="""Optional additional table(s) for describing other experimental time intervals.""", + ) + + class LabMetaData(NWBContainer): """ Lab-specific meta-data. 
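[review note] The `intervals` slot changes from a flat `List[TimeIntervals]` (whose `any_of` repeated the same range four times) to the dedicated `NWBFileIntervals` group above, which separates the fixed-name subgroups from any additional user tables. A minimal sketch with a stub `TimeIntervals`:

```python
# Sketch only: `TimeIntervals` is a stub stand-in for the generated class.
from typing import List, Literal, Optional
from pydantic import BaseModel

class TimeIntervals(BaseModel):
    name: str

class NWBFileIntervals(BaseModel):
    name: Literal["intervals"] = "intervals"
    epochs: Optional[TimeIntervals] = None
    trials: Optional[TimeIntervals] = None
    invalid_times: Optional[TimeIntervals] = None
    # any extra interval tables beyond the three fixed-name groups
    time_intervals: Optional[List[TimeIntervals]] = None

intervals = NWBFileIntervals(trials=TimeIntervals(name="trials"))
assert intervals.name == "intervals"
```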
@@ -579,5 +598,6 @@ GeneralSourceScript.model_rebuild() GeneralExtracellularEphys.model_rebuild() ExtracellularEphysElectrodes.model_rebuild() GeneralIntracellularEphys.model_rebuild() +NWBFileIntervals.model_rebuild() LabMetaData.model_rebuild() Subject.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py index f0a8769..0af6814 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py @@ -11,6 +11,7 @@ from ...core.v2_2_4.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_2_4.core_nwb_device import Device from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar from pydantic import ( BaseModel, @@ -117,6 +118,15 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -164,7 +174,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -195,6 +205,15 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -271,6 +290,15 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -324,6 +352,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = 
Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -416,6 +453,15 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -648,6 +694,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -724,6 +779,15 @@ class IntracellularElectrode(NWBContainer): slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class SweepTable(DynamicTable): @@ -752,7 +816,12 @@ class SweepTable(DynamicTable): ..., description="""Index for series.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py index 09f7cf1..a1ababb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py @@ -75,7 +75,7 @@ class GrayscaleImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -98,7 +98,7 @@ class RGBImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -121,7 +121,7 @@ class RGBAImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -208,7 +208,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): None, description="""Each external image may contain one or more 
consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) - array: Optional[NDArray[Shape["* num_files"], str]] = Field( + value: Optional[NDArray[Shape["* num_files"], str]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}} ) @@ -223,6 +223,15 @@ class ImageMaskSeries(ImageSeries): ) name: str = Field(...) + masked_imageseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) data: Optional[ Union[ NDArray[Shape["* frame, * x, * y"], float], @@ -360,6 +369,15 @@ class IndexSeries(TimeSeries): description="""Index of the frame in the referenced ImageSeries.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + indexed_timeseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py index 9cd8b01..51b8cfc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py @@ -167,7 +167,7 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -287,6 +287,15 @@ class DecompositionSeries(TimeSeries): ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) + source_timeseries: Optional[Union[TimeSeries, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "TimeSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -334,7 +343,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. 
Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -428,7 +437,12 @@ class Units(DynamicTable): None, description="""Index into the spike_times dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) spike_times: Optional[UnitsSpikeTimes] = Field( @@ -438,7 +452,12 @@ class Units(DynamicTable): None, description="""Index into the obs_intervals dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( @@ -459,14 +478,24 @@ class Units(DynamicTable): None, description="""Index into electrodes.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrodes: Named[Optional[DynamicTableRegion]] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrode_group: Optional[List[ElectrodeGroup]] = Field( @@ -524,7 +553,7 @@ class UnitsSpikeTimes(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py index 6f6995d..73f6642 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py @@ -14,6 +14,7 @@ from ...core.v2_2_4.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_2_4.core_nwb_device import Device metamodel_version = "None" version = "2.2.4" @@ -81,6 +82,15 @@ class OptogeneticSeries(TimeSeries): description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + site: Union[OptogeneticStimulusSite, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -129,6 +139,15 @@ class 
OptogeneticStimulusSite(NWBContainer): ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py index 8506e83..3c2ebd8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py @@ -21,8 +21,8 @@ from ...hdmf_common.v1_1_3.hdmf_common_table import ( VectorIndex, VectorData, ) +from ...core.v2_2_4.core_nwb_device import Device from numpydantic import NDArray, Shape -from ...core.v2_2_4.core_nwb_image import ImageSeries, ImageSeriesExternalFile from ...core.v2_2_4.core_nwb_base import ( TimeSeriesStartingTime, TimeSeriesSync, @@ -30,6 +30,7 @@ from ...core.v2_2_4.core_nwb_base import ( NWBDataInterface, NWBContainer, ) +from ...core.v2_2_4.core_nwb_image import ImageSeries, ImageSeriesExternalFile metamodel_version = "None" version = "2.2.4" @@ -122,6 +123,15 @@ class TwoPhotonSeries(ImageSeries): field_of_view: Optional[ Union[NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height"], float]] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) data: Optional[ Union[ NDArray[Shape["* frame, * x, * y"], float], @@ -190,7 +200,12 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) description: Optional[str] = Field(None, description="""Description of the time series.""") @@ -234,7 +249,7 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -249,7 +264,7 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -264,7 +279,7 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[PlaneSegmentation]] = Field( + value: Optional[List[PlaneSegmentation]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}} ) name: str = Field(...) 
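[review note] The `named` slots throughout now also carry a `source_type: neurodata_type_inc` annotation. One plausible reading of the `Named[...]` wrapper they use (an illustration of the idea only, not the nwb_linkml implementation) is an `Annotated` alias whose `BeforeValidator` stamps the slot name onto the incoming value:

```python
# Hypothetical sketch: `_set_name` and this `Named` alias are illustrative,
# not copied from the library.
from typing import Annotated, Any, Optional
from pydantic import BaseModel, BeforeValidator, Field, ValidationInfo

class DynamicTableRegion(BaseModel):
    name: Optional[str] = None

def _set_name(value: Any, info: ValidationInfo) -> Any:
    # assumption: a named slot takes its object name from the field it occupies
    if isinstance(value, dict) and "name" not in value:
        value["name"] = info.field_name
    return value

Named = Annotated[DynamicTableRegion, BeforeValidator(_set_name)]

class RoiResponseSeries(BaseModel):
    rois: Named = Field(...)

series = RoiResponseSeries(rois={})
assert series.rois.name == "rois"
```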
@@ -288,7 +303,12 @@ class PlaneSegmentation(DynamicTable): None, description="""Index into pixel_mask.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( @@ -299,7 +319,12 @@ class PlaneSegmentation(DynamicTable): None, description="""Index into voxel_mask.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( @@ -311,6 +336,15 @@ class PlaneSegmentation(DynamicTable): description="""Image stacks that the segmentation masks apply to.""", json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImageSeries"}]}}, ) + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -347,7 +381,7 @@ class PlaneSegmentationImageMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -376,7 +410,7 @@ class PlaneSegmentationPixelMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -406,7 +440,7 @@ class PlaneSegmentationVoxelMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -425,10 +459,137 @@ class ImagingPlane(NWBContainer): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[OpticalChannel]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "OpticalChannel"}]}} - ) name: str = Field(...) + description: Optional[str] = Field(None, description="""Description of the imaging plane.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + imaging_rate: Optional[float] = Field( + None, + description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""", + ) + indicator: str = Field(..., description="""Calcium indicator.""") + location: str = Field( + ..., + description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", + ) + manifold: Optional[ImagingPlaneManifold] = Field( + None, + description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. 
Deprecated in favor of origin_coords and grid_spacing.""", + ) + origin_coords: Optional[ImagingPlaneOriginCoords] = Field( + None, + description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""", + ) + grid_spacing: Optional[ImagingPlaneGridSpacing] = Field( + None, + description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""", + ) + reference_frame: Optional[str] = Field( + None, + description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""", + ) + optical_channel: List[OpticalChannel] = Field( + ..., description="""An optical channel used to record from an imaging plane.""" + ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) + + +class ImagingPlaneManifold(ConfiguredBaseModel): + """ + DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["manifold"] = Field( + "manifold", + json_schema_extra={ + "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"} + }, + ) + conversion: Optional[float] = Field( + None, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + ) + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. 
The default value is 'meters'.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* height, * width, 3 x_y_z"], float], + NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float], + ] + ] = Field(None) + + +class ImagingPlaneOriginCoords(ConfiguredBaseModel): + """ + Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["origin_coords"] = Field( + "origin_coords", + json_schema_extra={ + "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + ) + value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "x_y", "exact_cardinality": 2}, + {"alias": "x_y_z", "exact_cardinality": 3}, + ] + } + } + }, + ) + + +class ImagingPlaneGridSpacing(ConfiguredBaseModel): + """ + Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["grid_spacing"] = Field( + "grid_spacing", + json_schema_extra={ + "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + ) + value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "x_y", "exact_cardinality": 2}, + {"alias": "x_y_z", "exact_cardinality": 3}, + ] + } + } + }, + ) class OpticalChannel(NWBContainer): @@ -454,7 +615,7 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[CorrectedImageStack]] = Field( + value: Optional[List[CorrectedImageStack]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}} ) name: str = Field(...) 
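[review note] `ImagingPlane` loses its generic `children` list in favor of explicit, typed slots (`optical_channel`, `device`, the deprecated `manifold`, and so on), with the positional datasets split out into their own `ImagingPlane*` classes. A minimal sketch of the new shape; the classes are stubs and the field values are invented for illustration:

```python
# Sketch only: stub classes and example values, not the generated models.
from typing import List, Union
from pydantic import BaseModel, Field

class OpticalChannel(BaseModel):
    name: str
    description: str
    emission_lambda: float

class Device(BaseModel):
    name: str

class ImagingPlane(BaseModel):
    name: str
    excitation_lambda: float
    indicator: str
    location: str
    optical_channel: List[OpticalChannel] = Field(...)
    device: Union[Device, str] = Field(...)  # link: object or string reference

plane = ImagingPlane(
    name="imaging_plane_0",
    excitation_lambda=920.0,
    indicator="GCaMP6f",
    location="V1",
    optical_channel=[
        OpticalChannel(name="chan0", description="green", emission_lambda=510.0)
    ],
    device="/general/devices/Microscope",
)
```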
@@ -477,6 +638,15 @@ class CorrectedImageStack(NWBDataInterface): ..., description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""", ) + original: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) # Model rebuild @@ -491,6 +661,9 @@ PlaneSegmentationImageMask.model_rebuild() PlaneSegmentationPixelMask.model_rebuild() PlaneSegmentationVoxelMask.model_rebuild() ImagingPlane.model_rebuild() +ImagingPlaneManifold.model_rebuild() +ImagingPlaneOriginCoords.model_rebuild() +ImagingPlaneGridSpacing.model_rebuild() OpticalChannel.model_rebuild() MotionCorrection.model_rebuild() CorrectedImageStack.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py index e1dfcb7..fc5368f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py @@ -135,7 +135,7 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -167,7 +167,7 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -199,7 +199,7 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -231,7 +231,7 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -268,7 +268,7 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. 
Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -294,7 +294,7 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -330,7 +330,7 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py index 83cb593..2017869 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py @@ -56,6 +56,9 @@ from ...core.v2_2_4.core_nwb_ophys import ( PlaneSegmentationPixelMask, PlaneSegmentationVoxelMask, ImagingPlane, + ImagingPlaneManifold, + ImagingPlaneOriginCoords, + ImagingPlaneGridSpacing, OpticalChannel, MotionCorrection, CorrectedImageStack, @@ -138,6 +141,7 @@ from ...core.v2_2_4.core_nwb_file import ( GeneralExtracellularEphys, ExtracellularEphysElectrodes, GeneralIntracellularEphys, + NWBFileIntervals, LabMetaData, Subject, ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py index 4e739b3..64796e9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py @@ -87,7 +87,7 @@ class Image(NWBData): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -189,7 +189,7 @@ class TimeSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], Any], NDArray[Shape["* num_times, * num_dim2"], Any], @@ -241,7 +241,7 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) - children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( + value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py index 01ed338..77b111a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py @@ -131,7 +131,7 @@ class SpatialSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -148,7 +148,7 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[IntervalSeries]] = Field( + value: Optional[List[IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) name: str = Field(...) @@ -163,7 +163,7 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -178,7 +178,7 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -193,7 +193,7 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -208,7 +208,7 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -223,7 +223,7 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) 
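The `children` -> `value` rename in these behavior containers changes the attribute that downstream code reads. A hedged sketch against the v2.2.5 module path from the file header above:

```python
# Sketch only: grouped members now live under `.value`, not `.children`.
from nwb_linkml.models.pydantic.core.v2_2_5.core_nwb_behavior import BehavioralEvents

events = BehavioralEvents(name="BehavioralEvents", value=[])
assert events.value == []
assert "children" not in BehavioralEvents.model_fields
```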
@@ -238,7 +238,7 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py index ef56d6a..99b6083 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py @@ -16,6 +16,7 @@ from pydantic import ( ValidationInfo, BeforeValidator, ) +from ...core.v2_2_5.core_nwb_device import Device from ...core.v2_2_5.core_nwb_base import ( TimeSeries, TimeSeriesStartingTime, @@ -116,7 +117,12 @@ class ElectricalSeries(TimeSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -179,7 +185,12 @@ class SpikeEventSeries(ElectricalSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -256,7 +267,12 @@ class FeatureExtraction(NWBDataInterface): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) @@ -287,6 +303,15 @@ class EventDetection(NWBDataInterface): description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) + source_electricalseries: Union[ElectricalSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}], + } + }, + ) class EventWaveform(NWBDataInterface): @@ -298,7 +323,7 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[SpikeEventSeries]] = Field( + value: Optional[List[SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) name: str = Field(...) @@ -313,7 +338,7 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) 
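The expanded `json_schema_extra` on `named` slots means the `source_type` annotation is now recoverable from the generated model at runtime. A sketch, assuming the v2.2.5 import path from the file header above:

```python
# Sketch only: reading the new source_type annotation off a named slot.
from nwb_linkml.models.pydantic.core.v2_2_5.core_nwb_ecephys import ElectricalSeries

extra = ElectricalSeries.model_fields["electrodes"].json_schema_extra
annotations = extra["linkml_meta"]["annotations"]
print(annotations["source_type"])
# {'tag': 'source_type', 'value': 'neurodata_type_inc'}
```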
@@ -328,7 +353,7 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) @@ -352,6 +377,15 @@ class ElectrodeGroup(NWBContainer): position: Optional[ElectrodeGroupPosition] = Field( None, description="""stereotaxic or common framework coordinates""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class ElectrodeGroupPosition(ConfiguredBaseModel): @@ -406,6 +440,15 @@ class ClusterWaveforms(NWBDataInterface): } }, ) + clustering_interface: Union[Clustering, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Clustering"}, {"range": "string"}], + } + }, + ) class Clustering(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py index 9ce71f4..ad80c60 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py @@ -127,7 +127,12 @@ class TimeIntervals(DynamicTable): None, description="""Index for tags.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) timeseries: Optional[TimeIntervalsTimeseries] = Field( @@ -137,7 +142,12 @@ class TimeIntervals(DynamicTable): None, description="""Index for timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -187,7 +197,7 @@ class TimeIntervalsTimeseries(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py index 9f6c0c9..a0344e1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py @@ -7,7 +7,6 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...core.v2_2_5.core_nwb_epoch import TimeIntervals from ...core.v2_2_5.core_nwb_misc import Units from ...core.v2_2_5.core_nwb_device import Device from ...core.v2_2_5.core_nwb_ogen import OptogeneticStimulusSite @@ -16,6 +15,7 @@ from ...core.v2_2_5.core_nwb_ecephys import ElectrodeGroup from numpydantic import NDArray, Shape from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, VectorData, VectorIndex from 
...core.v2_2_5.core_nwb_icephys import IntracellularElectrode, SweepTable +from ...core.v2_2_5.core_nwb_epoch import TimeIntervals from ...core.v2_2_5.core_nwb_base import ( NWBData, NWBContainer, @@ -174,19 +174,9 @@ class NWBFile(NWBContainer): ..., description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""", ) - intervals: Optional[List[TimeIntervals]] = Field( + intervals: Optional[NWBFileIntervals] = Field( None, description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", - json_schema_extra={ - "linkml_meta": { - "any_of": [ - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - ] - } - }, ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") @@ -522,6 +512,35 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): ) +class NWBFileIntervals(ConfiguredBaseModel): + """ + Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"}) + + name: Literal["intervals"] = Field( + "intervals", + json_schema_extra={ + "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"} + }, + ) + epochs: Optional[TimeIntervals] = Field( + None, + description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""", + ) + trials: Optional[TimeIntervals] = Field( + None, description="""Repeated experimental events that have a logical grouping.""" + ) + invalid_times: Optional[TimeIntervals] = Field( + None, description="""Time intervals that should be removed from analysis.""" + ) + time_intervals: Optional[List[TimeIntervals]] = Field( + None, + description="""Optional additional table(s) for describing other experimental time intervals.""", + ) + + class LabMetaData(NWBContainer): """ Lab-specific meta-data. 
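With this hunk, `NWBFile.intervals` stops being a flat `List[TimeIntervals]` (the removed `any_of` repeated the same range four times) and becomes a dedicated subgroup with one slot per well-known table. A minimal sketch, assuming the module path in the file header above:

```python
# Sketch only: the new grouped intervals container.
from nwb_linkml.models.pydantic.core.v2_2_5.core_nwb_file import NWBFileIntervals

intervals = NWBFileIntervals()  # name defaults to "intervals"
print(intervals.name)                      # intervals
print(intervals.epochs, intervals.trials)  # None None
```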
@@ -579,5 +598,6 @@ GeneralSourceScript.model_rebuild() GeneralExtracellularEphys.model_rebuild() ExtracellularEphysElectrodes.model_rebuild() GeneralIntracellularEphys.model_rebuild() +NWBFileIntervals.model_rebuild() LabMetaData.model_rebuild() Subject.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py index 795f4b4..0cdf6b1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py @@ -11,6 +11,7 @@ from ...core.v2_2_5.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_2_5.core_nwb_device import Device from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar from pydantic import ( BaseModel, @@ -117,6 +118,15 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -164,7 +174,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -195,6 +205,15 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -271,6 +290,15 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -324,6 +352,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = 
Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -416,6 +453,15 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -648,6 +694,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -724,6 +779,15 @@ class IntracellularElectrode(NWBContainer): slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class SweepTable(DynamicTable): @@ -752,7 +816,12 @@ class SweepTable(DynamicTable): ..., description="""Index for series.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py index 7b0a102..e6790fa 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py @@ -75,7 +75,7 @@ class GrayscaleImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -98,7 +98,7 @@ class RGBImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -121,7 +121,7 @@ class RGBAImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -208,7 +208,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): None, description="""Each external image may contain one or more 
consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) - array: Optional[NDArray[Shape["* num_files"], str]] = Field( + value: Optional[NDArray[Shape["* num_files"], str]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}} ) @@ -223,6 +223,15 @@ class ImageMaskSeries(ImageSeries): ) name: str = Field(...) + masked_imageseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) data: Optional[ Union[ NDArray[Shape["* frame, * x, * y"], float], @@ -360,6 +369,15 @@ class IndexSeries(TimeSeries): description="""Index of the frame in the referenced ImageSeries.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + indexed_timeseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py index 6fff7db..c3f845e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py @@ -167,7 +167,7 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -287,6 +287,15 @@ class DecompositionSeries(TimeSeries): ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) + source_timeseries: Optional[Union[TimeSeries, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "TimeSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -334,7 +343,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. 
Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -428,7 +437,12 @@ class Units(DynamicTable): None, description="""Index into the spike_times dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) spike_times: Optional[UnitsSpikeTimes] = Field( @@ -438,7 +452,12 @@ class Units(DynamicTable): None, description="""Index into the obs_intervals dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( @@ -459,14 +478,24 @@ class Units(DynamicTable): None, description="""Index into electrodes.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrodes: Named[Optional[DynamicTableRegion]] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrode_group: Optional[List[ElectrodeGroup]] = Field( @@ -524,7 +553,7 @@ class UnitsSpikeTimes(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py index d6c46a3..c30f06b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py @@ -14,6 +14,7 @@ from ...core.v2_2_5.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_2_5.core_nwb_device import Device metamodel_version = "None" version = "2.2.5" @@ -81,6 +82,15 @@ class OptogeneticSeries(TimeSeries): description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + site: Union[OptogeneticStimulusSite, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -129,6 +139,15 @@ class 
OptogeneticStimulusSite(NWBContainer): ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py index 32129b6..6849817 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py @@ -21,8 +21,8 @@ from ...hdmf_common.v1_1_3.hdmf_common_table import ( VectorIndex, VectorData, ) +from ...core.v2_2_5.core_nwb_device import Device from numpydantic import NDArray, Shape -from ...core.v2_2_5.core_nwb_image import ImageSeries, ImageSeriesExternalFile from ...core.v2_2_5.core_nwb_base import ( TimeSeriesStartingTime, TimeSeriesSync, @@ -30,6 +30,7 @@ from ...core.v2_2_5.core_nwb_base import ( NWBDataInterface, NWBContainer, ) +from ...core.v2_2_5.core_nwb_image import ImageSeries, ImageSeriesExternalFile metamodel_version = "None" version = "2.2.5" @@ -124,6 +125,15 @@ class TwoPhotonSeries(ImageSeries): NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) data: Optional[ Union[ NDArray[Shape["* frame, * x, * y"], float], @@ -192,7 +202,12 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) description: Optional[str] = Field(None, description="""Description of the time series.""") @@ -236,7 +251,7 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -251,7 +266,7 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -266,7 +281,7 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[PlaneSegmentation]] = Field( + value: Optional[List[PlaneSegmentation]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}} ) name: str = Field(...) 
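Link-typed slots such as `site` and `device` are generated as `Union[<Target>, str]`, so a link can be satisfied either by the target container itself or by a string reference. A small inspection sketch (v2.2.5 import path assumed from the file header above):

```python
# Sketch only: link slots accept either the target model or a string.
from typing import get_args

from nwb_linkml.models.pydantic.core.v2_2_5.core_nwb_ogen import (
    OptogeneticSeries,
    OptogeneticStimulusSite,
)

args = get_args(OptogeneticSeries.model_fields["site"].annotation)
assert OptogeneticStimulusSite in args and str in args
```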
@@ -290,7 +305,12 @@ class PlaneSegmentation(DynamicTable): None, description="""Index into pixel_mask.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( @@ -301,7 +321,12 @@ class PlaneSegmentation(DynamicTable): None, description="""Index into voxel_mask.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( @@ -313,6 +338,15 @@ class PlaneSegmentation(DynamicTable): description="""Image stacks that the segmentation masks apply to.""", json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImageSeries"}]}}, ) + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -349,7 +383,7 @@ class PlaneSegmentationImageMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -378,7 +412,7 @@ class PlaneSegmentationPixelMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -408,7 +442,7 @@ class PlaneSegmentationVoxelMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -427,10 +461,117 @@ class ImagingPlane(NWBContainer): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[OpticalChannel]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "OpticalChannel"}]}} - ) name: str = Field(...) + description: Optional[str] = Field(None, description="""Description of the imaging plane.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + imaging_rate: Optional[float] = Field( + None, + description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""", + ) + indicator: str = Field(..., description="""Calcium indicator.""") + location: str = Field( + ..., + description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", + ) + manifold: Optional[ImagingPlaneManifold] = Field( + None, + description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. 
Deprecated in favor of origin_coords and grid_spacing.""", + ) + origin_coords: Optional[ImagingPlaneOriginCoords] = Field( + None, + description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""", + ) + grid_spacing: Optional[ImagingPlaneGridSpacing] = Field( + None, + description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""", + ) + reference_frame: Optional[str] = Field( + None, + description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""", + ) + optical_channel: List[OpticalChannel] = Field( + ..., description="""An optical channel used to record from an imaging plane.""" + ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) + + +class ImagingPlaneManifold(ConfiguredBaseModel): + """ + DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["manifold"] = Field( + "manifold", + json_schema_extra={ + "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"} + }, + ) + conversion: Optional[float] = Field( + None, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + ) + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. 
The default value is 'meters'.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* height, * width, 3 x_y_z"], float], + NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float], + ] + ] = Field(None) + + +class ImagingPlaneOriginCoords(ConfiguredBaseModel): + """ + Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["origin_coords"] = Field( + "origin_coords", + json_schema_extra={ + "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + ) + value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( + Field(None) + ) + + +class ImagingPlaneGridSpacing(ConfiguredBaseModel): + """ + Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["grid_spacing"] = Field( + "grid_spacing", + json_schema_extra={ + "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + ) + value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( + Field(None) + ) class OpticalChannel(NWBContainer): @@ -456,7 +597,7 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[CorrectedImageStack]] = Field( + value: Optional[List[CorrectedImageStack]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}} ) name: str = Field(...) 
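Unlike the v2.2.4 output earlier in this patch, the v2.2.5 `origin_coords`/`grid_spacing` datasets type `value` as a Union of 1-D (2,) and (3,) arrays, so both 2-D and 3-D imaging planes validate. A sketch, with the import path assumed from the file header above:

```python
# Sketch only: both 2-vector and 3-vector coordinates validate in v2.2.5.
import numpy as np

from nwb_linkml.models.pydantic.core.v2_2_5.core_nwb_ophys import (
    ImagingPlaneOriginCoords,
)

coords_2d = ImagingPlaneOriginCoords(value=np.array([-1.2, -0.6]), unit="meters")
coords_3d = ImagingPlaneOriginCoords(value=np.array([-1.2, -0.6, -2.0]), unit="meters")
```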
@@ -479,6 +620,15 @@ class CorrectedImageStack(NWBDataInterface): ..., description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""", ) + original: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) # Model rebuild @@ -493,6 +643,9 @@ PlaneSegmentationImageMask.model_rebuild() PlaneSegmentationPixelMask.model_rebuild() PlaneSegmentationVoxelMask.model_rebuild() ImagingPlane.model_rebuild() +ImagingPlaneManifold.model_rebuild() +ImagingPlaneOriginCoords.model_rebuild() +ImagingPlaneGridSpacing.model_rebuild() OpticalChannel.model_rebuild() MotionCorrection.model_rebuild() CorrectedImageStack.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py index fe8df70..79dacf3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py @@ -135,7 +135,7 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -167,7 +167,7 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -199,7 +199,7 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -231,7 +231,7 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -268,7 +268,7 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. 
Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -294,7 +294,7 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -330,7 +330,7 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py index efff00e..56b42a0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py @@ -56,6 +56,9 @@ from ...core.v2_2_5.core_nwb_ophys import ( PlaneSegmentationPixelMask, PlaneSegmentationVoxelMask, ImagingPlane, + ImagingPlaneManifold, + ImagingPlaneOriginCoords, + ImagingPlaneGridSpacing, OpticalChannel, MotionCorrection, CorrectedImageStack, @@ -138,6 +141,7 @@ from ...core.v2_2_5.core_nwb_file import ( GeneralExtracellularEphys, ExtracellularEphysElectrodes, GeneralIntracellularEphys, + NWBFileIntervals, LabMetaData, Subject, ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py index 5d6e07d..c3683bc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py @@ -92,7 +92,7 @@ class Image(NWBData): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -198,7 +198,7 @@ class TimeSeriesData(ConfiguredBaseModel): None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], Any], NDArray[Shape["* num_times, * num_dim2"], Any], @@ -250,7 +250,7 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) - children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( + value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py index 94f5843..a469b16 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py @@ -131,7 +131,7 @@ class SpatialSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -148,7 +148,7 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[IntervalSeries]] = Field( + value: Optional[List[IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) name: str = Field(...) @@ -163,7 +163,7 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -178,7 +178,7 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -193,7 +193,7 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -208,7 +208,7 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -223,7 +223,7 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) 
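The same `children` -> `value` rename carries through the regenerated v2.3.0 behavior module; a one-line check under the assumed import path:

```python
# Sketch only: the rename is uniform across generated namespace versions.
from nwb_linkml.models.pydantic.core.v2_3_0.core_nwb_behavior import BehavioralEpochs

assert "value" in BehavioralEpochs.model_fields
assert "children" not in BehavioralEpochs.model_fields
```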
@@ -238,7 +238,7 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py index c0b9368..19de749 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py @@ -16,6 +16,7 @@ from pydantic import ( ValidationInfo, BeforeValidator, ) +from ...core.v2_3_0.core_nwb_device import Device from ...core.v2_3_0.core_nwb_base import ( TimeSeries, TimeSeriesStartingTime, @@ -120,7 +121,12 @@ class ElectricalSeries(TimeSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -187,7 +193,12 @@ class SpikeEventSeries(ElectricalSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -264,7 +275,12 @@ class FeatureExtraction(NWBDataInterface): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) @@ -295,6 +311,15 @@ class EventDetection(NWBDataInterface): description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) + source_electricalseries: Union[ElectricalSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}], + } + }, + ) class EventWaveform(NWBDataInterface): @@ -306,7 +331,7 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[SpikeEventSeries]] = Field( + value: Optional[List[SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) name: str = Field(...) @@ -321,7 +346,7 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) 
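Two flavors of `source_type` now sit side by side in the v2.3.0 ecephys module: `neurodata_type_inc` on named slots and `link` on link slots. A sketch distinguishing them (import path assumed from the file header above):

```python
# Sketch only: the two source_type values emitted by the generator.
from nwb_linkml.models.pydantic.core.v2_3_0.core_nwb_ecephys import (
    ElectricalSeries,
    EventDetection,
)

def source_type(model: type, field: str) -> str:
    extra = model.model_fields[field].json_schema_extra
    return extra["linkml_meta"]["annotations"]["source_type"]["value"]

print(source_type(ElectricalSeries, "electrodes"))             # neurodata_type_inc
print(source_type(EventDetection, "source_electricalseries"))  # link
```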
@@ -336,7 +361,7 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) @@ -360,6 +385,15 @@ class ElectrodeGroup(NWBContainer): position: Optional[ElectrodeGroupPosition] = Field( None, description="""stereotaxic or common framework coordinates""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class ElectrodeGroupPosition(ConfiguredBaseModel): @@ -414,6 +448,15 @@ class ClusterWaveforms(NWBDataInterface): } }, ) + clustering_interface: Union[Clustering, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Clustering"}, {"range": "string"}], + } + }, + ) class Clustering(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py index 1466993..adfa639 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py @@ -127,7 +127,12 @@ class TimeIntervals(DynamicTable): None, description="""Index for tags.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) timeseries: Optional[TimeIntervalsTimeseries] = Field( @@ -137,7 +142,12 @@ class TimeIntervals(DynamicTable): None, description="""Index for timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -184,7 +194,7 @@ class TimeIntervalsTimeseries(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py index 499296e..03e0af6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py @@ -7,7 +7,6 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...core.v2_3_0.core_nwb_epoch import TimeIntervals from ...core.v2_3_0.core_nwb_misc import Units from ...core.v2_3_0.core_nwb_device import Device from ...core.v2_3_0.core_nwb_ogen import OptogeneticStimulusSite @@ -16,6 +15,7 @@ from ...core.v2_3_0.core_nwb_ecephys import ElectrodeGroup from numpydantic import NDArray, Shape from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, VectorData from 
...core.v2_3_0.core_nwb_icephys import IntracellularElectrode, SweepTable +from ...core.v2_3_0.core_nwb_epoch import TimeIntervals from ...core.v2_3_0.core_nwb_base import ( NWBData, NWBContainer, @@ -174,19 +174,9 @@ class NWBFile(NWBContainer): ..., description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""", ) - intervals: Optional[List[TimeIntervals]] = Field( + intervals: Optional[NWBFileIntervals] = Field( None, description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", - json_schema_extra={ - "linkml_meta": { - "any_of": [ - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - ] - } - }, ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") @@ -519,6 +509,35 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): ) +class NWBFileIntervals(ConfiguredBaseModel): + """ + Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"}) + + name: Literal["intervals"] = Field( + "intervals", + json_schema_extra={ + "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"} + }, + ) + epochs: Optional[TimeIntervals] = Field( + None, + description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""", + ) + trials: Optional[TimeIntervals] = Field( + None, description="""Repeated experimental events that have a logical grouping.""" + ) + invalid_times: Optional[TimeIntervals] = Field( + None, description="""Time intervals that should be removed from analysis.""" + ) + time_intervals: Optional[List[TimeIntervals]] = Field( + None, + description="""Optional additional table(s) for describing other experimental time intervals.""", + ) + + class LabMetaData(NWBContainer): """ Lab-specific meta-data. 
@@ -577,5 +596,6 @@ GeneralSourceScript.model_rebuild() GeneralExtracellularEphys.model_rebuild() ExtracellularEphysElectrodes.model_rebuild() GeneralIntracellularEphys.model_rebuild() +NWBFileIntervals.model_rebuild() LabMetaData.model_rebuild() Subject.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py index db95e72..2c8abe2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py @@ -11,6 +11,7 @@ from ...core.v2_3_0.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_3_0.core_nwb_device import Device from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar from pydantic import ( BaseModel, @@ -117,6 +118,15 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -164,7 +174,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -195,6 +205,15 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -272,6 +291,15 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -325,6 +353,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = 
Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -417,6 +454,15 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -649,6 +695,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -725,6 +780,15 @@ class IntracellularElectrode(NWBContainer): slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class SweepTable(DynamicTable): @@ -753,7 +817,12 @@ class SweepTable(DynamicTable): ..., description="""Index for series.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py index 356fc99..fbe685b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py @@ -7,6 +7,7 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np +from ...core.v2_3_0.core_nwb_device import Device from numpydantic import NDArray, Shape from ...core.v2_3_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ -75,7 +76,7 @@ class GrayscaleImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -98,7 +99,7 @@ class RGBImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -121,7 +122,7 @@ class RGBAImage(Image): None, description="""Pixel resolution of the image, 
in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -159,6 +160,15 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -208,7 +218,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) - array: Optional[NDArray[Shape["* num_files"], str]] = Field( + value: Optional[NDArray[Shape["* num_files"], str]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}} ) @@ -223,6 +233,15 @@ class ImageMaskSeries(ImageSeries): ) name: str = Field(...) + masked_imageseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) data: Optional[ Union[ NDArray[Shape["* frame, * x, * y"], float], @@ -242,6 +261,15 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -313,6 +341,15 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files.
If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -360,6 +397,15 @@ class IndexSeries(TimeSeries): description="""Index of the frame in the referenced ImageSeries.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + indexed_timeseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py index 01fcbf8..cbc95a2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py @@ -167,7 +167,7 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -287,13 +287,27 @@ class DecompositionSeries(TimeSeries): None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) bands: DecompositionSeriesBands = Field( ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) + source_timeseries: Optional[Union[TimeSeries, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "TimeSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -341,7 +355,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -432,7 +446,12 @@ class Units(DynamicTable): None, description="""Index into the spike_times dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) spike_times: Optional[UnitsSpikeTimes] = Field( @@ -442,7 +461,12 @@ class Units(DynamicTable): None, description="""Index into the obs_intervals dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( @@ -463,14 +487,24 @@ class Units(DynamicTable): None, description="""Index into electrodes.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrodes: Named[Optional[DynamicTableRegion]] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrode_group: Optional[List[ElectrodeGroup]] = Field( @@ -501,14 +535,24 @@ class Units(DynamicTable): None, description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) waveforms_index_index: Named[Optional[VectorIndex]] = Field( None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). 
See 'waveforms' for more detail.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -548,7 +592,7 @@ class UnitsSpikeTimes(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py index aa52534..aa317fb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py @@ -14,6 +14,7 @@ from ...core.v2_3_0.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_3_0.core_nwb_device import Device metamodel_version = "None" version = "2.3.0" @@ -81,6 +82,15 @@ class OptogeneticSeries(TimeSeries): description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + site: Union[OptogeneticStimulusSite, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -129,6 +139,15 @@ class OptogeneticStimulusSite(NWBContainer): ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py index 7de8e48..48ffbff 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py @@ -21,8 +21,8 @@ from ...hdmf_common.v1_5_0.hdmf_common_table import ( VectorIndex, VectorData, ) +from ...core.v2_3_0.core_nwb_device import Device from numpydantic import NDArray, Shape -from ...core.v2_3_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile from ...core.v2_3_0.core_nwb_base import ( TimeSeriesStartingTime, TimeSeriesSync, @@ -30,6 +30,7 @@ from ...core.v2_3_0.core_nwb_base import ( NWBDataInterface, NWBContainer, ) +from ...core.v2_3_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile metamodel_version = "None" version = "2.3.0" @@ -124,6 +125,15 @@ class TwoPhotonSeries(ImageSeries): NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) data: Optional[ Union[ NDArray[Shape["* frame, * x, * y"], float], @@ -143,6 +153,15 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -192,7 +211,12 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) description: Optional[str] = Field(None, description="""Description of the time series.""") @@ -236,7 +260,7 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) 
@@ -251,7 +275,7 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -266,7 +290,7 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[PlaneSegmentation]] = Field( + value: Optional[List[PlaneSegmentation]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}} ) name: str = Field(...) @@ -290,7 +314,12 @@ class PlaneSegmentation(DynamicTable): None, description="""Index into pixel_mask.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( @@ -301,7 +330,12 @@ class PlaneSegmentation(DynamicTable): None, description="""Index into voxel_mask.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( @@ -313,6 +347,15 @@ class PlaneSegmentation(DynamicTable): description="""Image stacks that the segmentation masks apply to.""", json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImageSeries"}]}}, ) + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -346,7 +389,7 @@ class PlaneSegmentationImageMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -375,7 +418,7 @@ class PlaneSegmentationPixelMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -405,7 +448,7 @@ class PlaneSegmentationVoxelMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -424,10 +467,117 @@ class ImagingPlane(NWBContainer): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[OpticalChannel]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "OpticalChannel"}]}} - ) name: str = Field(...) 
+ description: Optional[str] = Field(None, description="""Description of the imaging plane.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + imaging_rate: Optional[float] = Field( + None, + description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""", + ) + indicator: str = Field(..., description="""Calcium indicator.""") + location: str = Field( + ..., + description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", + ) + manifold: Optional[ImagingPlaneManifold] = Field( + None, + description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""", + ) + origin_coords: Optional[ImagingPlaneOriginCoords] = Field( + None, + description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""", + ) + grid_spacing: Optional[ImagingPlaneGridSpacing] = Field( + None, + description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""", + ) + reference_frame: Optional[str] = Field( + None, + description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""", + ) + optical_channel: List[OpticalChannel] = Field( + ..., description="""An optical channel used to record from an imaging plane.""" + ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) + + +class ImagingPlaneManifold(ConfiguredBaseModel): + """ + DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["manifold"] = Field( + "manifold", + json_schema_extra={ + "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"} + }, + ) + conversion: Optional[float] = Field( + None, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + ) + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. The default value is 'meters'.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* height, * width, 3 x_y_z"], float], + NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float], + ] + ] = Field(None) + + +class ImagingPlaneOriginCoords(ConfiguredBaseModel): + """ + Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["origin_coords"] = Field( + "origin_coords", + json_schema_extra={ + "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + ) + value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( + Field(None) + ) + + +class ImagingPlaneGridSpacing(ConfiguredBaseModel): + """ + Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["grid_spacing"] = Field( + "grid_spacing", + json_schema_extra={ + "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + ) + value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( + Field(None) + ) class OpticalChannel(NWBContainer): @@ -453,7 +603,7 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[CorrectedImageStack]] = Field( + value: Optional[List[CorrectedImageStack]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}} ) name: str = Field(...) 
@@ -476,6 +626,15 @@ class CorrectedImageStack(NWBDataInterface): ..., description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""", ) + original: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) # Model rebuild @@ -490,6 +649,9 @@ PlaneSegmentationImageMask.model_rebuild() PlaneSegmentationPixelMask.model_rebuild() PlaneSegmentationVoxelMask.model_rebuild() ImagingPlane.model_rebuild() +ImagingPlaneManifold.model_rebuild() +ImagingPlaneOriginCoords.model_rebuild() +ImagingPlaneGridSpacing.model_rebuild() OpticalChannel.model_rebuild() MotionCorrection.model_rebuild() CorrectedImageStack.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py index 2f9da18..3f2c3c9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py @@ -135,7 +135,7 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -167,7 +167,7 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -199,7 +199,7 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -231,7 +231,7 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -268,7 +268,7 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. 
Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -294,7 +294,7 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -330,7 +330,7 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py index 9669627..fe306d5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py @@ -59,6 +59,9 @@ from ...core.v2_3_0.core_nwb_ophys import ( PlaneSegmentationPixelMask, PlaneSegmentationVoxelMask, ImagingPlane, + ImagingPlaneManifold, + ImagingPlaneOriginCoords, + ImagingPlaneGridSpacing, OpticalChannel, MotionCorrection, CorrectedImageStack, @@ -141,6 +144,7 @@ from ...core.v2_3_0.core_nwb_file import ( GeneralExtracellularEphys, ExtracellularEphysElectrodes, GeneralIntracellularEphys, + NWBFileIntervals, LabMetaData, Subject, ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py index 1dac0d5..38da923 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py @@ -102,7 +102,7 @@ class TimeSeriesReferenceVectorData(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -126,7 +126,7 @@ class Image(NWBData): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -232,7 +232,7 @@ class TimeSeriesData(ConfiguredBaseModel): None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. 
Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], Any], NDArray[Shape["* num_times, * num_dim2"], Any], @@ -284,7 +284,7 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) - children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( + value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py index 6077059..2d75ba0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py @@ -131,7 +131,7 @@ class SpatialSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -148,7 +148,7 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[IntervalSeries]] = Field( + value: Optional[List[IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) name: str = Field(...) @@ -163,7 +163,7 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -178,7 +178,7 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -193,7 +193,7 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -208,7 +208,7 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -223,7 +223,7 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) 
@@ -238,7 +238,7 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py index 978abba..7099d5a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py @@ -16,6 +16,7 @@ from pydantic import ( ValidationInfo, BeforeValidator, ) +from ...core.v2_4_0.core_nwb_device import Device from ...core.v2_4_0.core_nwb_base import ( TimeSeries, TimeSeriesStartingTime, @@ -120,7 +121,12 @@ class ElectricalSeries(TimeSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -187,7 +193,12 @@ class SpikeEventSeries(ElectricalSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -264,7 +275,12 @@ class FeatureExtraction(NWBDataInterface): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) @@ -295,6 +311,15 @@ class EventDetection(NWBDataInterface): description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) + source_electricalseries: Union[ElectricalSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}], + } + }, + ) class EventWaveform(NWBDataInterface): @@ -306,7 +331,7 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[SpikeEventSeries]] = Field( + value: Optional[List[SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) name: str = Field(...) @@ -321,7 +346,7 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) 
@@ -336,7 +361,7 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) @@ -360,6 +385,15 @@ class ElectrodeGroup(NWBContainer): position: Optional[ElectrodeGroupPosition] = Field( None, description="""stereotaxic or common framework coordinates""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class ElectrodeGroupPosition(ConfiguredBaseModel): @@ -414,6 +448,15 @@ class ClusterWaveforms(NWBDataInterface): } }, ) + clustering_interface: Union[Clustering, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Clustering"}, {"range": "string"}], + } + }, + ) class Clustering(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py index 0cd6107..3a0c3fb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py @@ -127,7 +127,12 @@ class TimeIntervals(DynamicTable): None, description="""Index for tags.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) timeseries: Optional[TimeIntervalsTimeseries] = Field( @@ -137,7 +142,12 @@ class TimeIntervals(DynamicTable): None, description="""Index for timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -184,7 +194,7 @@ class TimeIntervalsTimeseries(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py index 6f128d3..de70d34 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py @@ -7,7 +7,6 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...core.v2_4_0.core_nwb_epoch import TimeIntervals from ...core.v2_4_0.core_nwb_misc import Units from ...core.v2_4_0.core_nwb_device import Device from ...core.v2_4_0.core_nwb_ogen import OptogeneticStimulusSite @@ -24,6 +23,7 @@ from ...core.v2_4_0.core_nwb_icephys import ( RepetitionsTable, ExperimentalConditionsTable, ) +from ...core.v2_4_0.core_nwb_epoch import TimeIntervals from ...core.v2_4_0.core_nwb_base import ( 
NWBData, NWBContainer, @@ -182,19 +182,9 @@ class NWBFile(NWBContainer): ..., description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""", ) - intervals: Optional[List[TimeIntervals]] = Field( + intervals: Optional[NWBFileIntervals] = Field( None, description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", - json_schema_extra={ - "linkml_meta": { - "any_of": [ - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - ] - } - }, ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") @@ -548,6 +538,35 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): ) +class NWBFileIntervals(ConfiguredBaseModel): + """ + Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"}) + + name: Literal["intervals"] = Field( + "intervals", + json_schema_extra={ + "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"} + }, + ) + epochs: Optional[TimeIntervals] = Field( + None, + description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""", + ) + trials: Optional[TimeIntervals] = Field( + None, description="""Repeated experimental events that have a logical grouping.""" + ) + invalid_times: Optional[TimeIntervals] = Field( + None, description="""Time intervals that should be removed from analysis.""" + ) + time_intervals: Optional[List[TimeIntervals]] = Field( + None, + description="""Optional additional table(s) for describing other experimental time intervals.""", + ) + + class LabMetaData(NWBContainer): """ Lab-specific meta-data. 
@@ -606,5 +625,6 @@ GeneralSourceScript.model_rebuild() GeneralExtracellularEphys.model_rebuild() ExtracellularEphysElectrodes.model_rebuild() GeneralIntracellularEphys.model_rebuild() +NWBFileIntervals.model_rebuild() LabMetaData.model_rebuild() Subject.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py index 760097b..8d6b370 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py @@ -5,6 +5,7 @@ from enum import Enum import re import sys import numpy as np +from ...core.v2_4_0.core_nwb_device import Device from ...core.v2_4_0.core_nwb_base import ( TimeSeries, TimeSeriesStartingTime, @@ -124,6 +125,15 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -171,7 +181,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -202,6 +212,15 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -279,6 +298,15 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -332,6 +360,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") 
comments: Optional[str] = Field( None, @@ -424,6 +461,15 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -656,6 +702,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -732,6 +787,15 @@ class IntracellularElectrode(NWBContainer): slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class SweepTable(DynamicTable): @@ -760,7 +824,12 @@ class SweepTable(DynamicTable): ..., description="""Index for series.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -827,7 +896,12 @@ class IntracellularStimuliTable(DynamicTable): ..., description="""Column storing the reference to the recorded stimulus for the recording (rows).""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -861,7 +935,12 @@ class IntracellularResponsesTable(DynamicTable): ..., description="""Column storing the reference to the recorded response for the recording (rows)""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -909,7 +988,7 @@ class IntracellularRecordingsTable(AlignedDynamicTable): responses: IntracellularResponsesTable = Field( ..., description="""Table for storing intracellular response related metadata.""" ) - children: Optional[List[DynamicTable]] = Field( + value: Optional[List[DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) colnames: Optional[str] = Field( @@ -952,7 +1031,12 @@ class SimultaneousRecordingsTable(DynamicTable): ..., description="""Index dataset for the recordings column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": 
{"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -992,7 +1076,7 @@ class SimultaneousRecordingsTableRecordings(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -1028,7 +1112,12 @@ class SequentialRecordingsTable(DynamicTable): ..., description="""Index dataset for the simultaneous_recordings column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) stimulus_type: NDArray[Any, str] = Field( @@ -1080,7 +1169,7 @@ class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -1113,7 +1202,12 @@ class RepetitionsTable(DynamicTable): ..., description="""Index dataset for the sequential_recordings column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -1156,7 +1250,7 @@ class RepetitionsTableSequentialRecordings(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -1191,7 +1285,12 @@ class ExperimentalConditionsTable(DynamicTable): ..., description="""Index dataset for the repetitions column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -1231,7 +1330,7 @@ class ExperimentalConditionsTableRepetitions(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py index e940464..249545b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py @@ -7,6 +7,7 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np +from ...core.v2_4_0.core_nwb_device import Device from numpydantic import NDArray, Shape from ...core.v2_4_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync @@ 
-75,7 +76,7 @@ class GrayscaleImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -98,7 +99,7 @@ class RGBImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -121,7 +122,7 @@ class RGBAImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -159,6 +160,15 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -208,7 +218,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) - array: Optional[NDArray[Shape["* num_files"], str]] = Field( + value: Optional[NDArray[Shape["* num_files"], str]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}} ) @@ -223,6 +233,15 @@ class ImageMaskSeries(ImageSeries): ) name: str = Field(...) + masked_imageseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( @@ -242,6 +261,15 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. 
If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -313,6 +341,15 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -360,6 +397,15 @@ class IndexSeries(TimeSeries): description="""Index of the frame in the referenced ImageSeries.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + indexed_timeseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py index 540d30f..804a7c2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py @@ -167,7 +167,7 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -287,13 +287,27 @@ class DecompositionSeries(TimeSeries): None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) bands: DecompositionSeriesBands = Field( ..., description="""Table for describing the bands that this series was generated from. 
There should be one row in this table for each band.""", ) + source_timeseries: Optional[Union[TimeSeries, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "TimeSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -341,7 +355,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -432,7 +446,12 @@ class Units(DynamicTable): None, description="""Index into the spike_times dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) spike_times: Optional[UnitsSpikeTimes] = Field( @@ -442,7 +461,12 @@ class Units(DynamicTable): None, description="""Index into the obs_intervals dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( @@ -463,14 +487,24 @@ class Units(DynamicTable): None, description="""Index into electrodes.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrodes: Named[Optional[DynamicTableRegion]] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrode_group: Optional[List[ElectrodeGroup]] = Field( @@ -501,14 +535,24 @@ class Units(DynamicTable): None, description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) waveforms_index_index: Named[Optional[VectorIndex]] = Field( None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). 
See 'waveforms' for more detail.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -548,7 +592,7 @@ class UnitsSpikeTimes(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py index 8c72013..1720289 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py @@ -14,6 +14,7 @@ from ...core.v2_4_0.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_4_0.core_nwb_device import Device metamodel_version = "None" version = "2.4.0" @@ -81,6 +82,15 @@ class OptogeneticSeries(TimeSeries): description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + site: Union[OptogeneticStimulusSite, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -129,6 +139,15 @@ class OptogeneticStimulusSite(NWBContainer): ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py index 24dd9a9..9fabdb8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py @@ -21,8 +21,8 @@ from ...hdmf_common.v1_5_0.hdmf_common_table import ( VectorIndex, VectorData, ) +from ...core.v2_4_0.core_nwb_device import Device from numpydantic import NDArray, Shape -from ...core.v2_4_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile from ...core.v2_4_0.core_nwb_base import ( TimeSeriesStartingTime, TimeSeriesSync, @@ -30,6 +30,7 @@ from ...core.v2_4_0.core_nwb_base import ( NWBDataInterface, NWBContainer, ) +from ...core.v2_4_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile metamodel_version = "None" version = "2.4.0" @@ -124,6 +125,15 @@ class TwoPhotonSeries(ImageSeries): NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( @@ -143,6 +153,15 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -192,7 +211,12 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) description: Optional[str] = Field(None, description="""Description of the time series.""") @@ -236,7 +260,7 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) 
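These ophys container groups (`DfOverF` above, `Fluorescence`/`ImageSegmentation`/`MotionCorrection` below) now expose their contents under `value` rather than `children`. A quick sketch of the renamed slot (hypothetical instance name; assumes the generated package is importable):

```python
from nwb_linkml.models.pydantic.core.v2_4_0.core_nwb_ophys import DfOverF

# contained RoiResponseSeries objects now live under `value`, not `children`
dff = DfOverF(name="DfOverF", value=[])
assert dff.value == []
```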
@@ -251,7 +275,7 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -266,7 +290,7 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[PlaneSegmentation]] = Field( + value: Optional[List[PlaneSegmentation]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}} ) name: str = Field(...) @@ -290,7 +314,12 @@ class PlaneSegmentation(DynamicTable): None, description="""Index into pixel_mask.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( @@ -301,7 +330,12 @@ class PlaneSegmentation(DynamicTable): None, description="""Index into voxel_mask.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( @@ -313,6 +347,15 @@ class PlaneSegmentation(DynamicTable): description="""Image stacks that the segmentation masks apply to.""", json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImageSeries"}]}}, ) + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -346,7 +389,7 @@ class PlaneSegmentationImageMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -375,7 +418,7 @@ class PlaneSegmentationPixelMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -405,7 +448,7 @@ class PlaneSegmentationVoxelMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -424,10 +467,117 @@ class ImagingPlane(NWBContainer): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[OpticalChannel]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "OpticalChannel"}]}} - ) name: str = Field(...) 
+ description: Optional[str] = Field(None, description="""Description of the imaging plane.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + imaging_rate: Optional[float] = Field( + None, + description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""", + ) + indicator: str = Field(..., description="""Calcium indicator.""") + location: str = Field( + ..., + description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", + ) + manifold: Optional[ImagingPlaneManifold] = Field( + None, + description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""", + ) + origin_coords: Optional[ImagingPlaneOriginCoords] = Field( + None, + description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""", + ) + grid_spacing: Optional[ImagingPlaneGridSpacing] = Field( + None, + description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""", + ) + reference_frame: Optional[str] = Field( + None, + description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""", + ) + optical_channel: List[OpticalChannel] = Field( + ..., description="""An optical channel used to record from an imaging plane.""" + ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) + + +class ImagingPlaneManifold(ConfiguredBaseModel): + """ + DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["manifold"] = Field( + "manifold", + json_schema_extra={ + "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"} + }, + ) + conversion: Optional[float] = Field( + None, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + ) + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. The default value is 'meters'.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* height, * width, 3 x_y_z"], float], + NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float], + ] + ] = Field(None) + + +class ImagingPlaneOriginCoords(ConfiguredBaseModel): + """ + Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["origin_coords"] = Field( + "origin_coords", + json_schema_extra={ + "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + ) + value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( + Field(None) + ) + + +class ImagingPlaneGridSpacing(ConfiguredBaseModel): + """ + Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["grid_spacing"] = Field( + "grid_spacing", + json_schema_extra={ + "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + ) + value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( + Field(None) + ) class OpticalChannel(NWBContainer): @@ -453,7 +603,7 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[CorrectedImageStack]] = Field( + value: Optional[List[CorrectedImageStack]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}} ) name: str = Field(...) 
@@ -476,6 +626,15 @@ class CorrectedImageStack(NWBDataInterface): ..., description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""", ) + original: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) # Model rebuild @@ -490,6 +649,9 @@ PlaneSegmentationImageMask.model_rebuild() PlaneSegmentationPixelMask.model_rebuild() PlaneSegmentationVoxelMask.model_rebuild() ImagingPlane.model_rebuild() +ImagingPlaneManifold.model_rebuild() +ImagingPlaneOriginCoords.model_rebuild() +ImagingPlaneGridSpacing.model_rebuild() OpticalChannel.model_rebuild() MotionCorrection.model_rebuild() CorrectedImageStack.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py index 501665e..f75b79b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py @@ -135,7 +135,7 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -167,7 +167,7 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -199,7 +199,7 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -231,7 +231,7 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -268,7 +268,7 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. 
Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -294,7 +294,7 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -330,7 +330,7 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py index 664e6a7..bc7052a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py @@ -60,6 +60,9 @@ from ...core.v2_4_0.core_nwb_ophys import ( PlaneSegmentationPixelMask, PlaneSegmentationVoxelMask, ImagingPlane, + ImagingPlaneManifold, + ImagingPlaneOriginCoords, + ImagingPlaneGridSpacing, OpticalChannel, MotionCorrection, CorrectedImageStack, @@ -154,6 +157,7 @@ from ...core.v2_4_0.core_nwb_file import ( GeneralExtracellularEphys, ExtracellularEphysElectrodes, GeneralIntracellularEphys, + NWBFileIntervals, LabMetaData, Subject, ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py index 96da748..0f2fc41 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py @@ -126,7 +126,7 @@ class TimeSeriesReferenceVectorData(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -150,7 +150,7 @@ class Image(NWBData): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -169,8 +169,14 @@ class ImageReferences(NWBData): ) name: str = Field(...) 
- image: List[Image] = Field( - ..., description="""Ordered dataset of references to Image objects.""" + value: List[Image] = Field( + ..., + description="""Ordered dataset of references to Image objects.""", + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "reference"}} + } + }, ) @@ -275,7 +281,7 @@ class TimeSeriesData(ConfiguredBaseModel): None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], Any], NDArray[Shape["* num_times, * num_dim2"], Any], @@ -327,7 +333,7 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) - children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( + value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} @@ -354,7 +360,12 @@ class Images(NWBDataInterface): None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py index 246b481..ffe0f14 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py @@ -131,7 +131,7 @@ class SpatialSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, 1 x"], float], @@ -150,7 +150,7 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[IntervalSeries]] = Field( + value: Optional[List[IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) name: str = Field(...) 
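In `core_nwb_base` above, `ImageReferences` likewise moves its ordered dataset from `image` to `value`, now tagged with a `reference` source_type. A small sketch (hypothetical names; assumes the generated package is importable):

```python
from nwb_linkml.models.pydantic.core.v2_5_0.core_nwb_base import Image, ImageReferences

refs = ImageReferences(name="order_of_images", value=[Image(name="frame0")])
assert refs.value[0].name == "frame0"
```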
@@ -165,7 +165,7 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -180,7 +180,7 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -195,7 +195,7 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -210,7 +210,7 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -225,7 +225,7 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -240,7 +240,7 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) 
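The behavior module follows the same two renames: container groups hold members under `value`, and the `array` slot on the `*Data` subgroups is now `value` as well. A sketch of the latter (assumes the generated package is importable, and that `SpatialSeriesData.name` keeps the usual `Literal["data"]` default, which this hunk does not show):

```python
import numpy as np
from nwb_linkml.models.pydantic.core.v2_5_0.core_nwb_behavior import SpatialSeriesData

# the per-timestep payload moved from `array` to `value`
data = SpatialSeriesData(name="data", unit="meters", value=np.linspace(0.0, 1.0, 10))
```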
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py index 671682b..997828f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py @@ -16,6 +16,7 @@ from pydantic import ( ValidationInfo, BeforeValidator, ) +from ...core.v2_5_0.core_nwb_device import Device from ...core.v2_5_0.core_nwb_base import ( TimeSeries, TimeSeriesStartingTime, @@ -120,7 +121,12 @@ class ElectricalSeries(TimeSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -187,7 +193,12 @@ class SpikeEventSeries(ElectricalSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -264,7 +275,12 @@ class FeatureExtraction(NWBDataInterface): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) @@ -295,6 +311,15 @@ class EventDetection(NWBDataInterface): description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) + source_electricalseries: Union[ElectricalSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}], + } + }, + ) class EventWaveform(NWBDataInterface): @@ -306,7 +331,7 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[SpikeEventSeries]] = Field( + value: Optional[List[SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) name: str = Field(...) @@ -321,7 +346,7 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) @@ -336,7 +361,7 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) 
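Link-typed slots such as `source_electricalseries` above are generated as `Union[<Target>, str]` with a `source_type: link` annotation, so an unresolved HDF5 path string validates just like the target object, and downstream tooling can tell the slot still needs resolution. A sketch of reading that annotation back (assumes the generated package is importable; standard pydantic v2 `model_fields` API):

```python
from nwb_linkml.models.pydantic.core.v2_5_0.core_nwb_ecephys import EventDetection

field = EventDetection.model_fields["source_electricalseries"]
annotations = field.json_schema_extra["linkml_meta"]["annotations"]
assert annotations["source_type"]["value"] == "link"
```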
@@ -360,6 +385,15 @@ class ElectrodeGroup(NWBContainer): position: Optional[ElectrodeGroupPosition] = Field( None, description="""stereotaxic or common framework coordinates""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class ElectrodeGroupPosition(ConfiguredBaseModel): @@ -414,6 +448,15 @@ class ClusterWaveforms(NWBDataInterface): } }, ) + clustering_interface: Union[Clustering, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Clustering"}, {"range": "string"}], + } + }, + ) class Clustering(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py index e728288..c399761 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py @@ -127,21 +127,36 @@ class TimeIntervals(DynamicTable): None, description="""Index for tags.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) timeseries: Named[Optional[TimeSeriesReferenceVectorData]] = Field( None, description="""An index into a TimeSeries object.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) timeseries_index: Named[Optional[VectorIndex]] = Field( None, description="""Index for timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py index 37dfb63..541ba5e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py @@ -7,7 +7,6 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...core.v2_5_0.core_nwb_epoch import TimeIntervals from ...core.v2_5_0.core_nwb_misc import Units from ...core.v2_5_0.core_nwb_device import Device from ...core.v2_5_0.core_nwb_ogen import OptogeneticStimulusSite @@ -24,6 +23,7 @@ from ...core.v2_5_0.core_nwb_icephys import ( RepetitionsTable, ExperimentalConditionsTable, ) +from ...core.v2_5_0.core_nwb_epoch import TimeIntervals from ...core.v2_5_0.core_nwb_base import ( NWBData, NWBContainer, @@ -183,19 +183,9 @@ class NWBFile(NWBContainer): ..., description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. 
Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""", ) - intervals: Optional[List[TimeIntervals]] = Field( + intervals: Optional[NWBFileIntervals] = Field( None, description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", - json_schema_extra={ - "linkml_meta": { - "any_of": [ - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - ] - } - }, ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") @@ -551,6 +541,35 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): ) +class NWBFileIntervals(ConfiguredBaseModel): + """ + Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"}) + + name: Literal["intervals"] = Field( + "intervals", + json_schema_extra={ + "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"} + }, + ) + epochs: Optional[TimeIntervals] = Field( + None, + description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""", + ) + trials: Optional[TimeIntervals] = Field( + None, description="""Repeated experimental events that have a logical grouping.""" + ) + invalid_times: Optional[TimeIntervals] = Field( + None, description="""Time intervals that should be removed from analysis.""" + ) + time_intervals: Optional[List[TimeIntervals]] = Field( + None, + description="""Optional additional table(s) for describing other experimental time intervals.""", + ) + + class LabMetaData(NWBContainer): """ Lab-specific meta-data. 
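As in the v2_4_0 file, the fixed group name on `NWBFileIntervals` is enforced through the `equals_string` Literal, so anything other than "intervals" is rejected at validation time. A sketch (assumes the generated package is importable):

```python
from pydantic import ValidationError
from nwb_linkml.models.pydantic.core.v2_5_0.core_nwb_file import NWBFileIntervals

try:
    NWBFileIntervals(name="not_intervals")
except ValidationError:
    print("only the literal group name 'intervals' validates")
```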
@@ -609,5 +628,6 @@ GeneralSourceScript.model_rebuild() GeneralExtracellularEphys.model_rebuild() ExtracellularEphysElectrodes.model_rebuild() GeneralIntracellularEphys.model_rebuild() +NWBFileIntervals.model_rebuild() LabMetaData.model_rebuild() Subject.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py index 1b741f1..b1c9fce 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py @@ -5,6 +5,7 @@ from enum import Enum import re import sys import numpy as np +from ...core.v2_5_0.core_nwb_device import Device from ...core.v2_5_0.core_nwb_base import ( TimeSeries, TimeSeriesStartingTime, @@ -124,6 +125,15 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -171,7 +181,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) - array: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -202,6 +212,15 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -279,6 +298,15 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -332,6 +360,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time 
series.""") comments: Optional[str] = Field( None, @@ -424,6 +461,15 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -656,6 +702,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -733,6 +788,15 @@ class IntracellularElectrode(NWBContainer): slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class SweepTable(DynamicTable): @@ -761,7 +825,12 @@ class SweepTable(DynamicTable): ..., description="""Index for series.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -828,7 +897,12 @@ class IntracellularStimuliTable(DynamicTable): ..., description="""Column storing the reference to the recorded stimulus for the recording (rows).""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -862,7 +936,12 @@ class IntracellularResponsesTable(DynamicTable): ..., description="""Column storing the reference to the recorded response for the recording (rows)""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -910,7 +989,7 @@ class IntracellularRecordingsTable(AlignedDynamicTable): responses: IntracellularResponsesTable = Field( ..., description="""Table for storing intracellular response related metadata.""" ) - children: Optional[List[DynamicTable]] = Field( + value: Optional[List[DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) colnames: Optional[str] = Field( @@ -953,7 +1032,12 @@ class SimultaneousRecordingsTable(DynamicTable): ..., description="""Index dataset for the recordings column.""", json_schema_extra={ - "linkml_meta": {"annotations": 
{"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -993,7 +1077,7 @@ class SimultaneousRecordingsTableRecordings(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -1029,7 +1113,12 @@ class SequentialRecordingsTable(DynamicTable): ..., description="""Index dataset for the simultaneous_recordings column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) stimulus_type: NDArray[Any, str] = Field( @@ -1081,7 +1170,7 @@ class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -1114,7 +1203,12 @@ class RepetitionsTable(DynamicTable): ..., description="""Index dataset for the sequential_recordings column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -1157,7 +1251,7 @@ class RepetitionsTableSequentialRecordings(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -1192,7 +1286,12 @@ class ExperimentalConditionsTable(DynamicTable): ..., description="""Index dataset for the repetitions column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -1232,7 +1331,7 @@ class ExperimentalConditionsTableRepetitions(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py index 209487e..91baa83 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py @@ -7,8 +7,15 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np +from ...core.v2_5_0.core_nwb_device import Device from numpydantic import NDArray, Shape -from ...core.v2_5_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, 
TimeSeriesSync +from ...core.v2_5_0.core_nwb_base import ( + Image, + TimeSeries, + TimeSeriesStartingTime, + TimeSeriesSync, + Images, +) metamodel_version = "None" version = "2.5.0" @@ -75,7 +82,7 @@ class GrayscaleImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -98,7 +105,7 @@ class RGBImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -121,7 +128,7 @@ class RGBAImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -159,6 +166,15 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -208,7 +224,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) - array: Optional[NDArray[Shape["* num_files"], str]] = Field( + value: Optional[NDArray[Shape["* num_files"], str]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}} ) @@ -223,6 +239,15 @@ class ImageMaskSeries(ImageSeries): ) name: str = Field(...) 
+ masked_imageseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( @@ -242,6 +267,15 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -313,6 +347,15 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -360,6 +403,24 @@ class IndexSeries(TimeSeries): description="""Index of the image (using zero-indexing) in the linked Images object.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + indexed_timeseries: Optional[Union[ImageSeries, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) + indexed_images: Optional[Union[Images, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Images"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py index 11d9e44..2ac3d3f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py @@ -167,7 +167,7 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): None, description="""Since there can be different units for different features, store the units in 'feature_units'. 
The default value for this attribute is \"see 'feature_units'\".""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -287,13 +287,27 @@ class DecompositionSeries(TimeSeries): None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) bands: DecompositionSeriesBands = Field( ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) + source_timeseries: Optional[Union[TimeSeries, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "TimeSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -341,7 +355,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -432,7 +446,12 @@ class Units(DynamicTable): None, description="""Index into the spike_times dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) spike_times: Optional[UnitsSpikeTimes] = Field( @@ -442,7 +461,12 @@ class Units(DynamicTable): None, description="""Index into the obs_intervals dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( @@ -463,14 +487,24 @@ class Units(DynamicTable): None, description="""Index into electrodes.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrodes: Named[Optional[DynamicTableRegion]] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrode_group: Optional[List[ElectrodeGroup]] = Field( @@ -501,14 +535,24 @@ class Units(DynamicTable): None, description="""Index into 
the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) waveforms_index_index: Named[Optional[VectorIndex]] = Field( None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -548,7 +592,7 @@ class UnitsSpikeTimes(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py index 88958c0..9ffaa75 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py @@ -14,6 +14,7 @@ from ...core.v2_5_0.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_5_0.core_nwb_device import Device metamodel_version = "None" version = "2.5.0" @@ -81,6 +82,15 @@ class OptogeneticSeries(TimeSeries): description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + site: Union[OptogeneticStimulusSite, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -129,6 +139,15 @@ class OptogeneticStimulusSite(NWBContainer): ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py index 41b48f1..01c7c0d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py @@ -21,8 +21,8 @@ from ...hdmf_common.v1_5_0.hdmf_common_table import ( VectorIndex, VectorData, ) +from ...core.v2_5_0.core_nwb_device import Device from numpydantic import NDArray, Shape -from ...core.v2_5_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile from ...core.v2_5_0.core_nwb_base import ( TimeSeriesStartingTime, TimeSeriesSync, @@ -30,6 +30,7 @@ from ...core.v2_5_0.core_nwb_base import ( NWBDataInterface, NWBContainer, ) +from ...core.v2_5_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile metamodel_version = "None" version = "2.5.0" @@ -124,6 +125,15 @@ class TwoPhotonSeries(ImageSeries): NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( @@ -143,6 +153,15 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -192,7 +211,12 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) description: Optional[str] = Field(None, description="""Description of the time series.""") @@ -236,7 +260,7 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) 
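`DfOverF` here follows the `children` -> `value` rename applied throughout the container classes: contained objects move to a `value` slot. A sketch of what that changes for callers, assuming a populated `DfOverF` model:

```python
def list_roi_series(dff: "DfOverF") -> list:
    # Contained RoiResponseSeries moved from .children to .value, so both
    # attribute access and model_dump() keys change accordingly.
    return list(dff.value or [])
```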
@@ -251,7 +275,7 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -266,7 +290,7 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[PlaneSegmentation]] = Field( + value: Optional[List[PlaneSegmentation]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}} ) name: str = Field(...) @@ -290,7 +314,12 @@ class PlaneSegmentation(DynamicTable): None, description="""Index into pixel_mask.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( @@ -301,7 +330,12 @@ class PlaneSegmentation(DynamicTable): None, description="""Index into voxel_mask.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( @@ -313,6 +347,15 @@ class PlaneSegmentation(DynamicTable): description="""Image stacks that the segmentation masks apply to.""", json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImageSeries"}]}}, ) + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -346,7 +389,7 @@ class PlaneSegmentationImageMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -375,7 +418,7 @@ class PlaneSegmentationPixelMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -405,7 +448,7 @@ class PlaneSegmentationVoxelMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -424,10 +467,117 @@ class ImagingPlane(NWBContainer): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[OpticalChannel]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "OpticalChannel"}]}} - ) name: str = Field(...) 
+ description: Optional[str] = Field(None, description="""Description of the imaging plane.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + imaging_rate: Optional[float] = Field( + None, + description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""", + ) + indicator: str = Field(..., description="""Calcium indicator.""") + location: str = Field( + ..., + description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", + ) + manifold: Optional[ImagingPlaneManifold] = Field( + None, + description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""", + ) + origin_coords: Optional[ImagingPlaneOriginCoords] = Field( + None, + description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""", + ) + grid_spacing: Optional[ImagingPlaneGridSpacing] = Field( + None, + description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""", + ) + reference_frame: Optional[str] = Field( + None, + description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""", + ) + optical_channel: List[OpticalChannel] = Field( + ..., description="""An optical channel used to record from an imaging plane.""" + ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) + + +class ImagingPlaneManifold(ConfiguredBaseModel): + """ + DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["manifold"] = Field( + "manifold", + json_schema_extra={ + "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"} + }, + ) + conversion: Optional[float] = Field( + None, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + ) + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. The default value is 'meters'.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* height, * width, 3 x_y_z"], float], + NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float], + ] + ] = Field(None) + + +class ImagingPlaneOriginCoords(ConfiguredBaseModel): + """ + Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["origin_coords"] = Field( + "origin_coords", + json_schema_extra={ + "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + ) + value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( + Field(None) + ) + + +class ImagingPlaneGridSpacing(ConfiguredBaseModel): + """ + Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["grid_spacing"] = Field( + "grid_spacing", + json_schema_extra={ + "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + ) + value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( + Field(None) + ) class OpticalChannel(NWBContainer): @@ -453,7 +603,7 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[CorrectedImageStack]] = Field( + value: Optional[List[CorrectedImageStack]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}} ) name: str = Field(...) 
@@ -476,6 +626,15 @@ class CorrectedImageStack(NWBDataInterface): ..., description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""", ) + original: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) # Model rebuild @@ -490,6 +649,9 @@ PlaneSegmentationImageMask.model_rebuild() PlaneSegmentationPixelMask.model_rebuild() PlaneSegmentationVoxelMask.model_rebuild() ImagingPlane.model_rebuild() +ImagingPlaneManifold.model_rebuild() +ImagingPlaneOriginCoords.model_rebuild() +ImagingPlaneGridSpacing.model_rebuild() OpticalChannel.model_rebuild() MotionCorrection.model_rebuild() CorrectedImageStack.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py index 167c50f..50bbf36 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py @@ -135,7 +135,7 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -167,7 +167,7 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -199,7 +199,7 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -231,7 +231,7 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -268,7 +268,7 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. 
Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -294,7 +294,7 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -330,7 +330,7 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py index 092a338..ce0dfe0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py @@ -61,6 +61,9 @@ from ...core.v2_5_0.core_nwb_ophys import ( PlaneSegmentationPixelMask, PlaneSegmentationVoxelMask, ImagingPlane, + ImagingPlaneManifold, + ImagingPlaneOriginCoords, + ImagingPlaneGridSpacing, OpticalChannel, MotionCorrection, CorrectedImageStack, @@ -155,6 +158,7 @@ from ...core.v2_5_0.core_nwb_file import ( GeneralExtracellularEphys, ExtracellularEphysElectrodes, GeneralIntracellularEphys, + NWBFileIntervals, LabMetaData, Subject, ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py index 13a1dcf..8a9ce62 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py @@ -126,7 +126,7 @@ class TimeSeriesReferenceVectorData(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -150,7 +150,7 @@ class Image(NWBData): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -169,8 +169,14 @@ class ImageReferences(NWBData): ) name: str = Field(...) 
- image: List[Image] = Field( - ..., description="""Ordered dataset of references to Image objects.""" + value: List[Image] = Field( + ..., + description="""Ordered dataset of references to Image objects.""", + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "reference"}} + } + }, ) @@ -275,7 +281,7 @@ class TimeSeriesData(ConfiguredBaseModel): None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], Any], NDArray[Shape["* num_times, * num_dim2"], Any], @@ -327,7 +333,7 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) - children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( + value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} @@ -354,7 +360,12 @@ class Images(NWBDataInterface): None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py index d2321dd..e4310c8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py @@ -131,7 +131,7 @@ class SpatialSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, 1 x"], float], @@ -150,7 +150,7 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[IntervalSeries]] = Field( + value: Optional[List[IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) name: str = Field(...) 
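The `Named[...]` wrapper used on slots like `order_of_images` pairs with the `named` annotation so the child object can recover its slot name at validation time. A minimal sketch of the pattern, under the assumption that it is built from an `Annotated` type with a `BeforeValidator` (the actual implementation lives in `nwb_linkml.includes.types`):

```python
from typing import Annotated, TypeVar
from pydantic import BeforeValidator, ValidationInfo

T = TypeVar("T")

def _get_name(value, info: ValidationInfo):
    # Sketch: inject the parent slot's name when the child omits one.
    if isinstance(value, dict) and not value.get("name"):
        value["name"] = info.field_name
    return value

Named = Annotated[T, BeforeValidator(_get_name)]
```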
@@ -165,7 +165,7 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -180,7 +180,7 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -195,7 +195,7 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -210,7 +210,7 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -225,7 +225,7 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -240,7 +240,7 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) 
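Link-typed slots throughout these models are generated as `Union[Model, str]` with a `source_type: link` annotation, so either an inline model or a string path can satisfy the reference. A standalone sketch of the pattern (illustrative stand-ins, not the generated classes themselves):

```python
from typing import Union
from pydantic import BaseModel

class Device(BaseModel):
    name: str

class ElectrodeGroupSketch(BaseModel):
    name: str
    device: Union[Device, str]  # link: a model instance or a path-like string

# Both forms validate against the union:
ElectrodeGroupSketch(name="shank0", device="/general/devices/probe0")
ElectrodeGroupSketch(name="shank1", device=Device(name="probe0"))
```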
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py index 694a64f..65b1d87 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py @@ -16,6 +16,7 @@ from pydantic import ( ValidationInfo, BeforeValidator, ) +from ...core.v2_6_0_alpha.core_nwb_device import Device from ...core.v2_6_0_alpha.core_nwb_base import ( TimeSeries, TimeSeriesStartingTime, @@ -120,7 +121,12 @@ class ElectricalSeries(TimeSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -187,7 +193,12 @@ class SpikeEventSeries(ElectricalSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -264,7 +275,12 @@ class FeatureExtraction(NWBDataInterface): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) @@ -295,6 +311,15 @@ class EventDetection(NWBDataInterface): description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) + source_electricalseries: Union[ElectricalSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}], + } + }, + ) class EventWaveform(NWBDataInterface): @@ -306,7 +331,7 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[SpikeEventSeries]] = Field( + value: Optional[List[SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) name: str = Field(...) @@ -321,7 +346,7 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) 
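The `source_type` annotation added alongside `named` is carried into `json_schema_extra`, so downstream tooling can distinguish links and included neurodata types at runtime. A sketch, assuming `ElectricalSeries` is imported from the generated module in this diff:

```python
from nwb_linkml.models.pydantic.core.v2_6_0_alpha.core_nwb_ecephys import ElectricalSeries

# FieldInfo.json_schema_extra holds the dict literally shown in the diff.
extra = ElectricalSeries.model_fields["electrodes"].json_schema_extra
annotations = extra["linkml_meta"]["annotations"]
assert annotations["named"]["value"] is True
assert annotations["source_type"]["value"] == "neurodata_type_inc"
```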
@@ -336,7 +361,7 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) @@ -360,6 +385,15 @@ class ElectrodeGroup(NWBContainer): position: Optional[ElectrodeGroupPosition] = Field( None, description="""stereotaxic or common framework coordinates""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class ElectrodeGroupPosition(ConfiguredBaseModel): @@ -414,6 +448,15 @@ class ClusterWaveforms(NWBDataInterface): } }, ) + clustering_interface: Union[Clustering, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Clustering"}, {"range": "string"}], + } + }, + ) class Clustering(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py index 0073a74..4adcf8b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py @@ -127,21 +127,36 @@ class TimeIntervals(DynamicTable): None, description="""Index for tags.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) timeseries: Named[Optional[TimeSeriesReferenceVectorData]] = Field( None, description="""An index into a TimeSeries object.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) timeseries_index: Named[Optional[VectorIndex]] = Field( None, description="""Index for timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py index ce4372c..e1521bd 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py @@ -7,7 +7,6 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...core.v2_6_0_alpha.core_nwb_epoch import TimeIntervals from ...core.v2_6_0_alpha.core_nwb_misc import Units from ...core.v2_6_0_alpha.core_nwb_device import Device from ...core.v2_6_0_alpha.core_nwb_ogen import OptogeneticStimulusSite @@ -24,6 +23,7 @@ from ...core.v2_6_0_alpha.core_nwb_icephys import ( RepetitionsTable, 
ExperimentalConditionsTable, ) +from ...core.v2_6_0_alpha.core_nwb_epoch import TimeIntervals from ...core.v2_6_0_alpha.core_nwb_base import ( NWBData, NWBContainer, @@ -183,19 +183,9 @@ class NWBFile(NWBContainer): ..., description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""", ) - intervals: Optional[List[TimeIntervals]] = Field( + intervals: Optional[NWBFileIntervals] = Field( None, description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", - json_schema_extra={ - "linkml_meta": { - "any_of": [ - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - ] - } - }, ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") @@ -551,6 +541,35 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): ) +class NWBFileIntervals(ConfiguredBaseModel): + """ + Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"}) + + name: Literal["intervals"] = Field( + "intervals", + json_schema_extra={ + "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"} + }, + ) + epochs: Optional[TimeIntervals] = Field( + None, + description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""", + ) + trials: Optional[TimeIntervals] = Field( + None, description="""Repeated experimental events that have a logical grouping.""" + ) + invalid_times: Optional[TimeIntervals] = Field( + None, description="""Time intervals that should be removed from analysis.""" + ) + time_intervals: Optional[List[TimeIntervals]] = Field( + None, + description="""Optional additional table(s) for describing other experimental time intervals.""", + ) + + class LabMetaData(NWBContainer): """ Lab-specific meta-data. 
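As in the v2.5.0 file module, `NWBFileIntervals` fixes its `name` with a `Literal` plus an `ifabsent` default, so the group name need not be passed explicitly. A sketch, assuming the class is imported from this generated module:

```python
from nwb_linkml.models.pydantic.core.v2_6_0_alpha.core_nwb_file import NWBFileIntervals

iv = NWBFileIntervals()  # name falls back to "intervals" via the ifabsent default
assert iv.name == "intervals"
# NWBFileIntervals(name="other") would fail validation: name is Literal["intervals"]
```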
@@ -627,6 +646,7 @@ GeneralSourceScript.model_rebuild() GeneralExtracellularEphys.model_rebuild() ExtracellularEphysElectrodes.model_rebuild() GeneralIntracellularEphys.model_rebuild() +NWBFileIntervals.model_rebuild() LabMetaData.model_rebuild() Subject.model_rebuild() SubjectAge.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py index c903549..9795c3a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py @@ -5,6 +5,7 @@ from enum import Enum import re import sys import numpy as np +from ...core.v2_6_0_alpha.core_nwb_device import Device from ...core.v2_6_0_alpha.core_nwb_base import ( TimeSeries, TimeSeriesStartingTime, @@ -124,6 +125,15 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -171,7 +181,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) - array: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -202,6 +212,15 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -279,6 +298,15 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -332,6 +360,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: 
Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -424,6 +461,15 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -656,6 +702,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -733,6 +788,15 @@ class IntracellularElectrode(NWBContainer): slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class SweepTable(DynamicTable): @@ -761,7 +825,12 @@ class SweepTable(DynamicTable): ..., description="""Index for series.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -828,7 +897,12 @@ class IntracellularStimuliTable(DynamicTable): ..., description="""Column storing the reference to the recorded stimulus for the recording (rows).""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -862,7 +936,12 @@ class IntracellularResponsesTable(DynamicTable): ..., description="""Column storing the reference to the recorded response for the recording (rows)""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -910,7 +989,7 @@ class IntracellularRecordingsTable(AlignedDynamicTable): responses: IntracellularResponsesTable = Field( ..., description="""Table for storing intracellular response related metadata.""" ) - children: Optional[List[DynamicTable]] = Field( + value: Optional[List[DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) colnames: Optional[str] = Field( @@ -953,7 +1032,12 @@ class SimultaneousRecordingsTable(DynamicTable): ..., description="""Index dataset for the 
recordings column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -993,7 +1077,7 @@ class SimultaneousRecordingsTableRecordings(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -1029,7 +1113,12 @@ class SequentialRecordingsTable(DynamicTable): ..., description="""Index dataset for the simultaneous_recordings column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) stimulus_type: NDArray[Any, str] = Field( @@ -1081,7 +1170,7 @@ class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -1114,7 +1203,12 @@ class RepetitionsTable(DynamicTable): ..., description="""Index dataset for the sequential_recordings column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -1157,7 +1251,7 @@ class RepetitionsTableSequentialRecordings(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -1192,7 +1286,12 @@ class ExperimentalConditionsTable(DynamicTable): ..., description="""Index dataset for the repetitions column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -1232,7 +1331,7 @@ class ExperimentalConditionsTableRepetitions(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py index e69ff14..dd490ec 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py @@ -7,12 +7,14 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np +from ...core.v2_6_0_alpha.core_nwb_device import Device from numpydantic import 
NDArray, Shape from ...core.v2_6_0_alpha.core_nwb_base import ( Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync, + Images, ) metamodel_version = "None" @@ -80,7 +82,7 @@ class GrayscaleImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -103,7 +105,7 @@ class RGBImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -126,7 +128,7 @@ class RGBAImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -164,6 +166,15 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -213,7 +224,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) - array: Optional[NDArray[Shape["* num_files"], str]] = Field( + value: Optional[NDArray[Shape["* num_files"], str]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}} ) @@ -228,6 +239,15 @@ class ImageMaskSeries(ImageSeries): ) name: str = Field(...) 
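Link-typed slots (`source_type: link`) are now generated as `Union[Model, str]` fields, e.g. `ImageMaskSeries.masked_imageseries` below, so a link can hold either the target object or an unresolved string reference such as an HDF5 path. A hedged sketch of how a consumer might handle both cases (`resolve_link` is illustrative, not part of the generated API):

```python
from typing import Union

def resolve_link(link: Union["ImageSeries", str]) -> str:
    """Illustrative only: links arrive as the target model or a string reference."""
    if isinstance(link, str):
        return link        # unresolved reference, e.g. "/acquisition/raw_series"
    return link.name       # already-resolved ImageSeries object
```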
+ masked_imageseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( @@ -247,6 +267,15 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -318,6 +347,15 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -365,6 +403,24 @@ class IndexSeries(TimeSeries): description="""Index of the image (using zero-indexing) in the linked Images object.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + indexed_timeseries: Optional[Union[ImageSeries, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) + indexed_images: Optional[Union[Images, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Images"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py index 428c0b1..2235224 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py @@ -167,7 +167,7 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): None, description="""Since there can be different units for different features, store the units in 'feature_units'. 
The default value for this attribute is \"see 'feature_units'\".""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -287,13 +287,27 @@ class DecompositionSeries(TimeSeries): None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) bands: DecompositionSeriesBands = Field( ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) + source_timeseries: Optional[Union[TimeSeries, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "TimeSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -341,7 +355,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -432,7 +446,12 @@ class Units(DynamicTable): None, description="""Index into the spike_times dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) spike_times: Optional[UnitsSpikeTimes] = Field( @@ -442,7 +461,12 @@ class Units(DynamicTable): None, description="""Index into the obs_intervals dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( @@ -463,14 +487,24 @@ class Units(DynamicTable): None, description="""Index into electrodes.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrodes: Named[Optional[DynamicTableRegion]] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrode_group: Optional[List[ElectrodeGroup]] = Field( @@ -501,14 +535,24 @@ class Units(DynamicTable): None, description="""Index into 
the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) waveforms_index_index: Named[Optional[VectorIndex]] = Field( None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -548,7 +592,7 @@ class UnitsSpikeTimes(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py index 419b1d0..b4db178 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py @@ -14,6 +14,7 @@ from ...core.v2_6_0_alpha.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_6_0_alpha.core_nwb_device import Device metamodel_version = "None" version = "2.6.0-alpha" @@ -81,6 +82,15 @@ class OptogeneticSeries(TimeSeries): description="""Applied power for optogenetic stimulus, in watts.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + site: Union[OptogeneticStimulusSite, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -129,6 +139,15 @@ class OptogeneticStimulusSite(NWBContainer): ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py index 7693cc6..9e6c34a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py @@ -21,8 +21,8 @@ from ...hdmf_common.v1_5_0.hdmf_common_table import ( VectorIndex, VectorData, ) +from ...core.v2_6_0_alpha.core_nwb_device import Device from numpydantic import NDArray, Shape -from ...core.v2_6_0_alpha.core_nwb_image import ImageSeries, ImageSeriesExternalFile from ...core.v2_6_0_alpha.core_nwb_base import ( TimeSeriesStartingTime, TimeSeriesSync, @@ -30,6 +30,7 @@ from ...core.v2_6_0_alpha.core_nwb_base import ( NWBDataInterface, NWBContainer, ) +from ...core.v2_6_0_alpha.core_nwb_image import ImageSeries, ImageSeriesExternalFile metamodel_version = "None" version = "2.6.0-alpha" @@ -129,6 +130,15 @@ class OnePhotonSeries(ImageSeries): intensity: Optional[float] = Field( None, description="""Intensity of the excitation in mW/mm^2, if known.""" ) + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( @@ -148,6 +158,15 @@ class OnePhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -200,6 +219,15 @@ class TwoPhotonSeries(ImageSeries): NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( @@ -219,6 +247,15 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -268,7 +305,12 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) description: Optional[str] = Field(None, description="""Description of the time series.""") @@ -312,7 +354,7 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -327,7 +369,7 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -342,7 +384,7 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[PlaneSegmentation]] = Field( + value: Optional[List[PlaneSegmentation]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}} ) name: str = Field(...) @@ -366,7 +408,12 @@ class PlaneSegmentation(DynamicTable): None, description="""Index into pixel_mask.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( @@ -377,7 +424,12 @@ class PlaneSegmentation(DynamicTable): None, description="""Index into voxel_mask.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( @@ -389,6 +441,15 @@ class PlaneSegmentation(DynamicTable): description="""Image stacks that the segmentation masks apply to.""", json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImageSeries"}]}}, ) + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", @@ -422,7 +483,7 @@ class PlaneSegmentationImageMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -451,7 +512,7 @@ class PlaneSegmentationPixelMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -481,7 +542,7 @@ class PlaneSegmentationVoxelMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -500,10 +561,117 @@ class ImagingPlane(NWBContainer): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[OpticalChannel]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "OpticalChannel"}]}} - ) name: str = Field(...) + description: Optional[str] = Field(None, description="""Description of the imaging plane.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + imaging_rate: Optional[float] = Field( + None, + description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""", + ) + indicator: str = Field(..., description="""Calcium indicator.""") + location: str = Field( + ..., + description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", + ) + manifold: Optional[ImagingPlaneManifold] = Field( + None, + description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""", + ) + origin_coords: Optional[ImagingPlaneOriginCoords] = Field( + None, + description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""", + ) + grid_spacing: Optional[ImagingPlaneGridSpacing] = Field( + None, + description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""", + ) + reference_frame: Optional[str] = Field( + None, + description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. 
For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""", + ) + optical_channel: List[OpticalChannel] = Field( + ..., description="""An optical channel used to record from an imaging plane.""" + ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) + + +class ImagingPlaneManifold(ConfiguredBaseModel): + """ + DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["manifold"] = Field( + "manifold", + json_schema_extra={ + "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"} + }, + ) + conversion: Optional[float] = Field( + None, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + ) + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. The default value is 'meters'.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* height, * width, 3 x_y_z"], float], + NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float], + ] + ] = Field(None) + + +class ImagingPlaneOriginCoords(ConfiguredBaseModel): + """ + Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["origin_coords"] = Field( + "origin_coords", + json_schema_extra={ + "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + ) + value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( + Field(None) + ) + + +class ImagingPlaneGridSpacing(ConfiguredBaseModel): + """ + Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. 
See also reference_frame to interpret the grid. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["grid_spacing"] = Field( + "grid_spacing", + json_schema_extra={ + "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + ) + value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( + Field(None) + ) class OpticalChannel(NWBContainer): @@ -529,7 +697,7 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[CorrectedImageStack]] = Field( + value: Optional[List[CorrectedImageStack]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}} ) name: str = Field(...) @@ -552,6 +720,15 @@ class CorrectedImageStack(NWBDataInterface): ..., description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""", ) + original: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) # Model rebuild @@ -567,6 +744,9 @@ PlaneSegmentationImageMask.model_rebuild() PlaneSegmentationPixelMask.model_rebuild() PlaneSegmentationVoxelMask.model_rebuild() ImagingPlane.model_rebuild() +ImagingPlaneManifold.model_rebuild() +ImagingPlaneOriginCoords.model_rebuild() +ImagingPlaneGridSpacing.model_rebuild() OpticalChannel.model_rebuild() MotionCorrection.model_rebuild() CorrectedImageStack.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py index ce3ae04..9dd7994 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py @@ -135,7 +135,7 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -167,7 +167,7 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -199,7 +199,7 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": 
"num_cols"}]}} @@ -231,7 +231,7 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -268,7 +268,7 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -294,7 +294,7 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -330,7 +330,7 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py index 9a21619..6cb7862 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py @@ -62,6 +62,9 @@ from ...core.v2_6_0_alpha.core_nwb_ophys import ( PlaneSegmentationPixelMask, PlaneSegmentationVoxelMask, ImagingPlane, + ImagingPlaneManifold, + ImagingPlaneOriginCoords, + ImagingPlaneGridSpacing, OpticalChannel, MotionCorrection, CorrectedImageStack, @@ -156,6 +159,7 @@ from ...core.v2_6_0_alpha.core_nwb_file import ( GeneralExtracellularEphys, ExtracellularEphysElectrodes, GeneralIntracellularEphys, + NWBFileIntervals, LabMetaData, Subject, SubjectAge, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py index 745ac40..38c4e21 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py @@ -126,7 +126,7 @@ class TimeSeriesReferenceVectorData(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -150,7 +150,7 @@ class Image(NWBData): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = 
Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -169,8 +169,14 @@ class ImageReferences(NWBData): ) name: str = Field(...) - image: List[Image] = Field( - ..., description="""Ordered dataset of references to Image objects.""" + value: List[Image] = Field( + ..., + description="""Ordered dataset of references to Image objects.""", + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "reference"}} + } + }, ) @@ -275,7 +281,7 @@ class TimeSeriesData(ConfiguredBaseModel): None, description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], Any], NDArray[Shape["* num_times, * num_dim2"], Any], @@ -327,7 +333,7 @@ class ProcessingModule(NWBContainer): {"from_schema": "core.nwb.base", "tree_root": True} ) - children: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( + value: Optional[List[Union[DynamicTable, NWBDataInterface]]] = Field( None, json_schema_extra={ "linkml_meta": {"any_of": [{"range": "NWBDataInterface"}, {"range": "DynamicTable"}]} @@ -354,7 +360,12 @@ class Images(NWBDataInterface): None, description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py index 304d675..f1ea1a0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py @@ -131,7 +131,7 @@ class SpatialSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, 1 x"], float], @@ -150,7 +150,7 @@ class BehavioralEpochs(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[IntervalSeries]] = Field( + value: Optional[List[IntervalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "IntervalSeries"}]}} ) name: str = Field(...) 
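As elsewhere in the patch, the untyped `children` slot on group containers is renamed to `value`; the contained objects keep their `any_of` ranges but are now addressed uniformly. A small sketch, assuming the generated v2.7.0 models are importable:

```python
# Sketch, assuming the generated v2.7.0 behavior models are importable.
from nwb_linkml.models.pydantic.core.v2_7_0.core_nwb_behavior import BehavioralEpochs

group = BehavioralEpochs(name="behavioral_epochs")  # value defaults to None
for interval_series in group.value or []:           # formerly group.children
    print(interval_series.name)
```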
@@ -165,7 +165,7 @@ class BehavioralEvents(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -180,7 +180,7 @@ class BehavioralTimeSeries(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -195,7 +195,7 @@ class PupilTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[TimeSeries]] = Field( + value: Optional[List[TimeSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "TimeSeries"}]}} ) name: str = Field(...) @@ -210,7 +210,7 @@ class EyeTracking(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -225,7 +225,7 @@ class CompassDirection(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) @@ -240,7 +240,7 @@ class Position(NWBDataInterface): {"from_schema": "core.nwb.behavior", "tree_root": True} ) - children: Optional[List[SpatialSeries]] = Field( + value: Optional[List[SpatialSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpatialSeries"}]}} ) name: str = Field(...) 
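The same rename applies to dataset subgroups: the numpydantic-validated array payload moves from `array` to `value`, as in `SpatialSeriesData` above. A sketch of the shape-union validation, assuming numpy and the generated v2.7.0 models are available (only the `(num_times,)` variant shown here is taken from the declared shapes):

```python
# Sketch, assuming numpy and the generated v2.7.0 behavior models are importable.
import numpy as np
from nwb_linkml.models.pydantic.core.v2_7_0.core_nwb_behavior import SpatialSeriesData

# "value" accepts any declared shape variant, e.g. a bare (num_times,) vector:
data = SpatialSeriesData(unit="meters", value=np.zeros(10))
```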
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py index 4b32f60..2a92c91 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py @@ -16,6 +16,7 @@ from pydantic import ( ValidationInfo, BeforeValidator, ) +from ...core.v2_7_0.core_nwb_device import Device from ...core.v2_7_0.core_nwb_base import ( TimeSeries, TimeSeriesStartingTime, @@ -120,7 +121,12 @@ class ElectricalSeries(TimeSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -187,7 +193,12 @@ class SpikeEventSeries(ElectricalSeries): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( @@ -264,7 +275,12 @@ class FeatureExtraction(NWBDataInterface): ..., description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) @@ -295,6 +311,15 @@ class EventDetection(NWBDataInterface): description="""Timestamps of events, in seconds.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_events"}]}}}, ) + source_electricalseries: Union[ElectricalSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ElectricalSeries"}, {"range": "string"}], + } + }, + ) class EventWaveform(NWBDataInterface): @@ -306,7 +331,7 @@ class EventWaveform(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[SpikeEventSeries]] = Field( + value: Optional[List[SpikeEventSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "SpikeEventSeries"}]}} ) name: str = Field(...) @@ -321,7 +346,7 @@ class FilteredEphys(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) @@ -336,7 +361,7 @@ class LFP(NWBDataInterface): {"from_schema": "core.nwb.ecephys", "tree_root": True} ) - children: Optional[List[ElectricalSeries]] = Field( + value: Optional[List[ElectricalSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "ElectricalSeries"}]}} ) name: str = Field(...) 
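Named columns now carry a `source_type: neurodata_type_inc` annotation next to the existing `named` tag, as on `ElectricalSeries.electrodes` above. The annotation lands in the field's `json_schema_extra`, so it can be introspected from the model class; a sketch, assuming the generated v2.7.0 models are importable:

```python
# Sketch, assuming the generated v2.7.0 ecephys models are importable.
from nwb_linkml.models.pydantic.core.v2_7_0.core_nwb_ecephys import ElectricalSeries

extra = ElectricalSeries.model_fields["electrodes"].json_schema_extra
annotations = extra["linkml_meta"]["annotations"]
assert annotations["named"] == {"tag": "named", "value": True}
assert annotations["source_type"]["value"] == "neurodata_type_inc"
```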
@@ -360,6 +385,15 @@ class ElectrodeGroup(NWBContainer): position: Optional[ElectrodeGroupPosition] = Field( None, description="""stereotaxic or common framework coordinates""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class ElectrodeGroupPosition(ConfiguredBaseModel): @@ -414,6 +448,15 @@ class ClusterWaveforms(NWBDataInterface): } }, ) + clustering_interface: Union[Clustering, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Clustering"}, {"range": "string"}], + } + }, + ) class Clustering(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py index a715324..44cc59e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py @@ -127,21 +127,36 @@ class TimeIntervals(DynamicTable): None, description="""Index for tags.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) timeseries: Named[Optional[TimeSeriesReferenceVectorData]] = Field( None, description="""An index into a TimeSeries object.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) timeseries_index: Named[Optional[VectorIndex]] = Field( None, description="""Index for timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py index 0e7d234..5d2c034 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py @@ -7,7 +7,6 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...core.v2_7_0.core_nwb_epoch import TimeIntervals from ...core.v2_7_0.core_nwb_misc import Units from ...core.v2_7_0.core_nwb_device import Device from ...core.v2_7_0.core_nwb_ogen import OptogeneticStimulusSite @@ -24,6 +23,7 @@ from ...core.v2_7_0.core_nwb_icephys import ( RepetitionsTable, ExperimentalConditionsTable, ) +from ...core.v2_7_0.core_nwb_epoch import TimeIntervals from ...core.v2_7_0.core_nwb_base import ( NWBData, NWBContainer, @@ -183,19 +183,9 @@ class NWBFile(NWBContainer): ..., description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. 
Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""", ) - intervals: Optional[List[TimeIntervals]] = Field( + intervals: Optional[NWBFileIntervals] = Field( None, description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", - json_schema_extra={ - "linkml_meta": { - "any_of": [ - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - {"range": "TimeIntervals"}, - ] - } - }, ) units: Optional[Units] = Field(None, description="""Data about sorted spike units.""") @@ -559,6 +549,35 @@ class GeneralIntracellularEphys(ConfiguredBaseModel): ) +class NWBFileIntervals(ConfiguredBaseModel): + """ + Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.file"}) + + name: Literal["intervals"] = Field( + "intervals", + json_schema_extra={ + "linkml_meta": {"equals_string": "intervals", "ifabsent": "string(intervals)"} + }, + ) + epochs: Optional[TimeIntervals] = Field( + None, + description="""Divisions in time marking experimental stages or sub-divisions of a single recording session.""", + ) + trials: Optional[TimeIntervals] = Field( + None, description="""Repeated experimental events that have a logical grouping.""" + ) + invalid_times: Optional[TimeIntervals] = Field( + None, description="""Time intervals that should be removed from analysis.""" + ) + time_intervals: Optional[List[TimeIntervals]] = Field( + None, + description="""Optional additional table(s) for describing other experimental time intervals.""", + ) + + class LabMetaData(NWBContainer): """ Lab-specific meta-data. 
@@ -635,6 +654,7 @@ GeneralSourceScript.model_rebuild() GeneralExtracellularEphys.model_rebuild() ExtracellularEphysElectrodes.model_rebuild() GeneralIntracellularEphys.model_rebuild() +NWBFileIntervals.model_rebuild() LabMetaData.model_rebuild() Subject.model_rebuild() SubjectAge.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py index b84a7f0..23a0ff2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py @@ -5,6 +5,7 @@ from enum import Enum import re import sys import numpy as np +from ...core.v2_7_0.core_nwb_device import Device from ...core.v2_7_0.core_nwb_base import ( TimeSeries, TimeSeriesStartingTime, @@ -124,6 +125,15 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -171,7 +181,7 @@ class PatchClampSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) - array: Optional[NDArray[Shape["* num_times"], float]] = Field( + value: Optional[NDArray[Shape["* num_times"], float]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}} ) @@ -202,6 +212,15 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -279,6 +298,15 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -332,6 +360,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, 
description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -424,6 +461,15 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -656,6 +702,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) + electrode: Union[IntracellularElectrode, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "IntracellularElectrode"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -733,6 +788,15 @@ class IntracellularElectrode(NWBContainer): slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) class SweepTable(DynamicTable): @@ -761,7 +825,12 @@ class SweepTable(DynamicTable): ..., description="""Index for series.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -828,14 +897,24 @@ class IntracellularStimuliTable(DynamicTable): ..., description="""Column storing the reference to the recorded stimulus for the recording (rows).""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) stimulus_template: Named[Optional[TimeSeriesReferenceVectorData]] = Field( None, description="""Column storing the reference to the stimulus template for the recording (rows).""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -869,7 +948,12 @@ class IntracellularResponsesTable(DynamicTable): ..., description="""Column storing the reference to the recorded response for the recording (rows)""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -917,7 +1001,7 @@ class IntracellularRecordingsTable(AlignedDynamicTable): responses: IntracellularResponsesTable = Field( 
..., description="""Table for storing intracellular response related metadata.""" ) - children: Optional[List[DynamicTable]] = Field( + value: Optional[List[DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) colnames: Optional[str] = Field( @@ -960,7 +1044,12 @@ class SimultaneousRecordingsTable(DynamicTable): ..., description="""Index dataset for the recordings column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -1000,7 +1089,7 @@ class SimultaneousRecordingsTableRecordings(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -1036,7 +1125,12 @@ class SequentialRecordingsTable(DynamicTable): ..., description="""Index dataset for the simultaneous_recordings column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) stimulus_type: NDArray[Any, str] = Field( @@ -1088,7 +1182,7 @@ class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -1121,7 +1215,12 @@ class RepetitionsTable(DynamicTable): ..., description="""Index dataset for the sequential_recordings column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -1164,7 +1263,7 @@ class RepetitionsTableSequentialRecordings(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -1199,7 +1298,12 @@ class ExperimentalConditionsTable(DynamicTable): ..., description="""Index dataset for the repetitions column.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -1239,7 +1343,7 @@ class ExperimentalConditionsTableRepetitions(DynamicTableRegion): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py index 9fff36e..e43675b 100644 --- 
a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py @@ -7,8 +7,15 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np +from ...core.v2_7_0.core_nwb_device import Device from numpydantic import NDArray, Shape -from ...core.v2_7_0.core_nwb_base import Image, TimeSeries, TimeSeriesStartingTime, TimeSeriesSync +from ...core.v2_7_0.core_nwb_base import ( + Image, + TimeSeries, + TimeSeriesStartingTime, + TimeSeriesSync, + Images, +) metamodel_version = "None" version = "2.7.0" @@ -75,7 +82,7 @@ class GrayscaleImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -98,7 +105,7 @@ class RGBImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -121,7 +128,7 @@ class RGBAImage(Image): None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) description: Optional[str] = Field(None, description="""Description of the image.""") - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* x, * y"], float], NDArray[Shape["* x, * y, 3 r_g_b"], float], @@ -159,6 +166,15 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -208,7 +224,7 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): None, description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. 
If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) - array: Optional[NDArray[Shape["* num_files"], str]] = Field( + value: Optional[NDArray[Shape["* num_files"], str]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}}} ) @@ -223,6 +239,15 @@ class ImageMaskSeries(ImageSeries): ) name: str = Field(...) + masked_imageseries: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( @@ -242,6 +267,15 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -313,6 +347,15 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -360,6 +403,24 @@ class IndexSeries(TimeSeries): description="""Index of the image (using zero-indexing) in the linked Images object.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) + indexed_timeseries: Optional[Union[ImageSeries, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) + indexed_images: Optional[Union[Images, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Images"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py index 7d8bcb2..c4b1c2e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py @@ -167,7 +167,7 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): None, description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* num_times"], float], NDArray[Shape["* num_times, * num_features"], float], @@ -287,13 +287,27 @@ class DecompositionSeries(TimeSeries): None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) bands: DecompositionSeriesBands = Field( ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) + source_timeseries: Optional[Union[TimeSeries, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "TimeSeries"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -341,7 +355,7 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( + value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, json_schema_extra={ "linkml_meta": { @@ -432,7 +446,12 @@ class Units(DynamicTable): None, description="""Index into the spike_times dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) spike_times: Optional[UnitsSpikeTimes] = Field( @@ -442,7 +461,12 @@ class Units(DynamicTable): None, description="""Index into the obs_intervals dataset.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( @@ -463,14 +487,24 @@ class Units(DynamicTable): None, description="""Index into electrodes.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrodes: Named[Optional[DynamicTableRegion]] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) electrode_group: Optional[List[ElectrodeGroup]] = Field( @@ -501,14 +535,24 @@ class Units(DynamicTable): None, description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) waveforms_index_index: Named[Optional[VectorIndex]] = Field( None, description="""Index into the waveforms_index dataset. One value for every unit (row in the table). 
See 'waveforms' for more detail.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) colnames: Optional[str] = Field( @@ -548,7 +592,7 @@ class UnitsSpikeTimes(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py index 4e0da1a..e575a80 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py @@ -14,6 +14,7 @@ from ...core.v2_7_0.core_nwb_base import ( TimeSeriesSync, NWBContainer, ) +from ...core.v2_7_0.core_nwb_device import Device metamodel_version = "None" version = "2.7.0" @@ -82,6 +83,15 @@ class OptogeneticSeries(TimeSeries): ..., description="""Applied power for optogenetic stimulus, in watts. Shape can be 1D or 2D. 2D data is meant to be used in an extension of OptogeneticSeries that defines what the second dimension represents.""", ) + site: Union[OptogeneticStimulusSite, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "OptogeneticStimulusSite"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -130,6 +140,15 @@ class OptogeneticStimulusSite(NWBContainer): ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py index 789a327..daeed24 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py @@ -21,8 +21,8 @@ from ...hdmf_common.v1_8_0.hdmf_common_table import ( VectorIndex, VectorData, ) +from ...core.v2_7_0.core_nwb_device import Device from numpydantic import NDArray, Shape -from ...core.v2_7_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile from ...core.v2_7_0.core_nwb_base import ( TimeSeriesStartingTime, TimeSeriesSync, @@ -30,6 +30,7 @@ from ...core.v2_7_0.core_nwb_base import ( NWBDataInterface, NWBContainer, ) +from ...core.v2_7_0.core_nwb_image import ImageSeries, ImageSeriesExternalFile metamodel_version = "None" version = "2.7.0" @@ -129,6 +130,15 @@ class OnePhotonSeries(ImageSeries): intensity: Optional[float] = Field( None, description="""Intensity of the excitation in mW/mm^2, if known.""" ) + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( @@ -148,6 +158,15 @@ class OnePhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -200,6 +219,15 @@ class TwoPhotonSeries(ImageSeries): NDArray[Shape["2 width_height"], float], NDArray[Shape["3 width_height_depth"], float] ] ] = Field(None, description="""Width, height and depth of image, or imaged area, in meters.""") + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, * z"], float] ] = Field( @@ -219,6 +247,15 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", ) + device: Optional[Union[Device, str]] = Field( + None, + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) description: Optional[str] = Field(None, description="""Description of the time series.""") comments: Optional[str] = Field( None, @@ -268,7 +305,12 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) description: Optional[str] = Field(None, description="""Description of the time series.""") @@ -312,7 +354,7 @@ class DfOverF(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -327,7 +369,7 @@ class Fluorescence(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[RoiResponseSeries]] = Field( + value: Optional[List[RoiResponseSeries]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "RoiResponseSeries"}]}} ) name: str = Field(...) @@ -342,7 +384,7 @@ class ImageSegmentation(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[PlaneSegmentation]] = Field( + value: Optional[List[PlaneSegmentation]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "PlaneSegmentation"}]}} ) name: str = Field(...) @@ -366,7 +408,12 @@ class PlaneSegmentation(DynamicTable): None, description="""Index into pixel_mask.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) pixel_mask: Optional[PlaneSegmentationPixelMask] = Field( @@ -377,7 +424,12 @@ class PlaneSegmentation(DynamicTable): None, description="""Index into voxel_mask.""", json_schema_extra={ - "linkml_meta": {"annotations": {"named": {"tag": "named", "value": True}}} + "linkml_meta": { + "annotations": { + "named": {"tag": "named", "value": True}, + "source_type": {"tag": "source_type", "value": "neurodata_type_inc"}, + } + } }, ) voxel_mask: Optional[PlaneSegmentationVoxelMask] = Field( @@ -389,6 +441,15 @@ class PlaneSegmentation(DynamicTable): description="""Image stacks that the segmentation masks apply to.""", json_schema_extra={"linkml_meta": {"any_of": [{"range": "ImageSeries"}]}}, ) + imaging_plane: Union[ImagingPlane, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImagingPlane"}, {"range": "string"}], + } + }, + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", @@ -422,7 +483,7 @@ class PlaneSegmentationImageMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -451,7 +512,7 @@ class PlaneSegmentationPixelMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -481,7 +542,7 @@ class PlaneSegmentationVoxelMask(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -500,10 +561,117 @@ class ImagingPlane(NWBContainer): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[OpticalChannel]] = Field( - None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "OpticalChannel"}]}} - ) name: str = Field(...) + description: Optional[str] = Field(None, description="""Description of the imaging plane.""") + excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + imaging_rate: Optional[float] = Field( + None, + description="""Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.""", + ) + indicator: str = Field(..., description="""Calcium indicator.""") + location: str = Field( + ..., + description="""Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", + ) + manifold: Optional[ImagingPlaneManifold] = Field( + None, + description="""DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.""", + ) + origin_coords: Optional[ImagingPlaneOriginCoords] = Field( + None, + description="""Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).""", + ) + grid_spacing: Optional[ImagingPlaneGridSpacing] = Field( + None, + description="""Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.""", + ) + reference_frame: Optional[str] = Field( + None, + description="""Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. 
For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"""", + ) + optical_channel: List[OpticalChannel] = Field( + ..., description="""An optical channel used to record from an imaging plane.""" + ) + device: Union[Device, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "Device"}, {"range": "string"}], + } + }, + ) + + +class ImagingPlaneManifold(ConfiguredBaseModel): + """ + DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["manifold"] = Field( + "manifold", + json_schema_extra={ + "linkml_meta": {"equals_string": "manifold", "ifabsent": "string(manifold)"} + }, + ) + conversion: Optional[float] = Field( + None, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + ) + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. The default value is 'meters'.""", + ) + value: Optional[ + Union[ + NDArray[Shape["* height, * width, 3 x_y_z"], float], + NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float], + ] + ] = Field(None) + + +class ImagingPlaneOriginCoords(ConfiguredBaseModel): + """ + Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["origin_coords"] = Field( + "origin_coords", + json_schema_extra={ + "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + ) + value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( + Field(None) + ) + + +class ImagingPlaneGridSpacing(ConfiguredBaseModel): + """ + Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. 
See also reference_frame to interpret the grid. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "core.nwb.ophys"}) + + name: Literal["grid_spacing"] = Field( + "grid_spacing", + json_schema_extra={ + "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} + }, + ) + unit: Optional[str] = Field( + None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + ) + value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( + Field(None) + ) class OpticalChannel(NWBContainer): @@ -529,7 +697,7 @@ class MotionCorrection(NWBDataInterface): {"from_schema": "core.nwb.ophys", "tree_root": True} ) - children: Optional[List[CorrectedImageStack]] = Field( + value: Optional[List[CorrectedImageStack]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "CorrectedImageStack"}]}} ) name: str = Field(...) @@ -552,6 +720,15 @@ class CorrectedImageStack(NWBDataInterface): ..., description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""", ) + original: Union[ImageSeries, str] = Field( + ..., + json_schema_extra={ + "linkml_meta": { + "annotations": {"source_type": {"tag": "source_type", "value": "link"}}, + "any_of": [{"range": "ImageSeries"}, {"range": "string"}], + } + }, + ) # Model rebuild @@ -567,6 +744,9 @@ PlaneSegmentationImageMask.model_rebuild() PlaneSegmentationPixelMask.model_rebuild() PlaneSegmentationVoxelMask.model_rebuild() ImagingPlane.model_rebuild() +ImagingPlaneManifold.model_rebuild() +ImagingPlaneOriginCoords.model_rebuild() +ImagingPlaneGridSpacing.model_rebuild() OpticalChannel.model_rebuild() MotionCorrection.model_rebuild() CorrectedImageStack.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py index 402cc40..40c1f40 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py @@ -135,7 +135,7 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -167,7 +167,7 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -199,7 +199,7 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -231,7 +231,7 
@@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -268,7 +268,7 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -294,7 +294,7 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} @@ -330,7 +330,7 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) - array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( + value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py index 80b7f5e..4f98b35 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py @@ -63,6 +63,9 @@ from ...core.v2_7_0.core_nwb_ophys import ( PlaneSegmentationPixelMask, PlaneSegmentationVoxelMask, ImagingPlane, + ImagingPlaneManifold, + ImagingPlaneOriginCoords, + ImagingPlaneGridSpacing, OpticalChannel, MotionCorrection, CorrectedImageStack, @@ -157,6 +160,7 @@ from ...core.v2_7_0.core_nwb_file import ( GeneralExtracellularEphys, ExtracellularEphysElectrodes, GeneralIntracellularEphys, + NWBFileIntervals, LabMetaData, Subject, SubjectAge, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index dbca48c..7ca0724 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -332,7 +332,7 @@ class VectorData(VectorDataMixin): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -355,7 +355,7 @@ class VectorIndex(VectorIndexMixin): target: Optional[VectorData] = Field( None, description="""Reference to the target dataset that this index applies to.""" ) - array: 
Optional[NDArray[Shape["* num_rows"], Any]] = Field( + value: Optional[NDArray[Shape["* num_rows"], Any]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}} ) @@ -390,7 +390,7 @@ class DynamicTableRegion(VectorData): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py index 078665b..29c6ea2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py @@ -91,7 +91,7 @@ class SimpleMultiContainer(Container): {"from_schema": "hdmf-common.base", "tree_root": True} ) - children: Optional[List[Container]] = Field( + value: Optional[List[Container]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} ) name: str = Field(...) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 9dbabc2..7df7183 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -306,7 +306,7 @@ class VectorData(VectorDataMixin): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -332,7 +332,7 @@ class VectorIndex(VectorIndexMixin): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -372,7 +372,7 @@ class DynamicTableRegion(VectorData): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -418,7 +418,7 @@ class AlignedDynamicTable(DynamicTable): {"from_schema": "hdmf-common.table", "tree_root": True} ) - children: Optional[List[DynamicTable]] = Field( + value: Optional[List[DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) name: str = Field(...) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py index 93ec4a2..f125a10 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py @@ -91,7 +91,7 @@ class SimpleMultiContainer(Container): {"from_schema": "hdmf-common.base", "tree_root": True} ) - children: Optional[List[Container]] = Field( + value: Optional[List[Container]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} ) name: str = Field(...) 
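The recurring `array:` → `value:` and `children:` → `value:` renames above converge on a single slot name for a node's contents. A sketch of the resulting field, using numpydantic the same way the generated models do (a reduced stand-in, not the real `VectorData`):

```python
from typing import Any, Optional, Union

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class VectorData(BaseModel):
    # reduced stand-in for the generated class
    name: str
    description: Optional[str] = None
    # formerly `array`; only the 1-D and 2-D shapes are shown here for brevity
    value: Optional[
        Union[
            NDArray[Shape["* dim0"], Any],
            NDArray[Shape["* dim0, * dim1"], Any],
        ]
    ] = None


vd = VectorData(name="spike_times", value=np.arange(5))
assert vd.value.shape == (5,)
```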
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index e9ce91d..e168269 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -306,7 +306,7 @@ class VectorData(VectorDataMixin): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -332,7 +332,7 @@ class VectorIndex(VectorIndexMixin): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -372,7 +372,7 @@ class DynamicTableRegion(VectorData): description: Optional[str] = Field( None, description="""Description of what this table region points to.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], @@ -418,7 +418,7 @@ class AlignedDynamicTable(DynamicTable): {"from_schema": "hdmf-common.table", "tree_root": True} ) - children: Optional[List[DynamicTable]] = Field( + value: Optional[List[DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) name: str = Field(...) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py index 065f135..135f2a3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py @@ -78,7 +78,7 @@ class EnumData(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py index 13d77f3..a7e6936 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py @@ -78,7 +78,7 @@ class EnumData(VectorData): description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) - array: Optional[ + value: Optional[ Union[ NDArray[Shape["* dim0"], Any], NDArray[Shape["* dim0, * dim1"], Any], diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.base.yaml index 11d5b6f..b2eebf1 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.base.yaml @@ -42,8 +42,8 @@ classes: name: description description: Description of the image. 
range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -202,8 +202,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value range: AnyType any_of: - array: @@ -268,8 +268,8 @@ classes: description: A collection of processed data. is_a: NWBContainer attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.behavior.yaml index f9539be..4e5ffa0 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.behavior.yaml @@ -62,8 +62,8 @@ classes: value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -89,8 +89,8 @@ classes: events. BehavioralTimeSeries is for continuous data. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -103,8 +103,8 @@ classes: for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -117,8 +117,8 @@ classes: of BehavioralEpochs for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -130,8 +130,8 @@ classes: description: Eye-tracking data, representing pupil size. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -143,8 +143,8 @@ classes: description: Eye-tracking data, representing direction of gaze. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -160,8 +160,8 @@ classes: be radians or degrees. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -173,8 +173,8 @@ classes: description: Position data, whether along the x, x/y or x/y/z axis. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml index 758cec8..5af96b3 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml @@ -52,6 +52,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -167,6 +170,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. 
range: DynamicTableRegion @@ -211,6 +217,17 @@ classes: range: float64 required: true multivalued: false + source_electricalseries: + name: source_electricalseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ElectricalSeries + - range: string tree_root: true EventWaveform: name: EventWaveform @@ -219,8 +236,8 @@ classes: during experiment acquisition. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -242,8 +259,8 @@ classes: the ElectricalSeries. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -257,8 +274,8 @@ classes: properties should be noted in the ElectricalSeries description or comments field. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -290,6 +307,17 @@ classes: range: ElectrodeGroup__position required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ElectrodeGroup__position: name: ElectrodeGroup__position @@ -364,6 +392,17 @@ classes: range: float32 required: true multivalued: false + clustering_interface: + name: clustering_interface + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Clustering + - range: string tree_root: true Clustering: name: Clustering diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.epoch.yaml index bc5be75..924f069 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.epoch.yaml @@ -57,6 +57,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for tags. range: VectorIndex required: false @@ -73,6 +76,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for timeseries. range: VectorIndex required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml index cad0645..3dd86fc 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml @@ -192,14 +192,9 @@ classes: having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals + range: NWBFile__intervals + required: false + multivalued: false units: name: units description: Data about sorted spike units. 
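The YAML hunks above add a `source_type` annotation alongside the existing `named` annotation on indexing slots; in the generated Python these annotations travel through `json_schema_extra` (see the model hunks earlier in this patch) and so survive into the JSON schema. A sketch of that round trip (field and type simplified; the real slot is a `Named[Optional[VectorIndex]]`):

```python
from typing import Optional

from pydantic import BaseModel, Field


class Units(BaseModel):
    # reduced stand-in; only the annotation plumbing is shown
    spike_times_index: Optional[str] = Field(
        None,
        json_schema_extra={
            "linkml_meta": {
                "annotations": {
                    "named": {"tag": "named", "value": True},
                    "source_type": {"tag": "source_type", "value": "neurodata_type_inc"},
                }
            }
        },
    )


# the extras land on the property entry of the model's JSON schema
meta = Units.model_json_schema()["properties"]["spike_times_index"]["linkml_meta"]
assert meta["annotations"]["source_type"]["value"] == "neurodata_type_inc"
```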
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml
index cad0645..3dd86fc 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml
@@ -192,14 +192,9 @@ classes:
           having a particular scientific goal, trials (see trials subgroup) during
           an experiment, or epochs (see epochs subgroup) deriving from analysis of
           data.
-        multivalued: true
-        inlined: true
-        inlined_as_list: false
-        any_of:
-        - range: TimeIntervals
-        - range: TimeIntervals
-        - range: TimeIntervals
-        - range: TimeIntervals
+        range: NWBFile__intervals
+        required: false
+        multivalued: false
       units:
         name: units
         description: Data about sorted spike units.
@@ -683,3 +678,41 @@ classes:
         range: SweepTable
         required: false
         multivalued: false
+  NWBFile__intervals:
+    name: NWBFile__intervals
+    description: Experimental intervals, whether that be logically distinct sub-experiments
+      having a particular scientific goal, trials (see trials subgroup) during an
+      experiment, or epochs (see epochs subgroup) deriving from analysis of data.
+    attributes:
+      name:
+        name: name
+        ifabsent: string(intervals)
+        range: string
+        required: true
+        equals_string: intervals
+      epochs:
+        name: epochs
+        description: Divisions in time marking experimental stages or sub-divisions
+          of a single recording session.
+        range: TimeIntervals
+        required: false
+        multivalued: false
+      trials:
+        name: trials
+        description: Repeated experimental events that have a logical grouping.
+        range: TimeIntervals
+        required: false
+        multivalued: false
+      invalid_times:
+        name: invalid_times
+        description: Time intervals that should be removed from analysis.
+        range: TimeIntervals
+        required: false
+        multivalued: false
+      time_intervals:
+        name: time_intervals
+        description: Optional additional table(s) for describing other experimental
+          time intervals.
+        range: TimeIntervals
+        required: false
+        multivalued: true
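The intervals refactor above replaces the four-way `any_of` of `TimeIntervals` with a dedicated `NWBFile__intervals` container class. A minimal sketch of the Pydantic surface this schema implies (field names mirror the slots above; the real classes live in the generated, versioned modules):

```python
# Sketch only: the model shape implied by the NWBFile__intervals class.
from typing import List, Literal, Optional

from pydantic import BaseModel


class TimeIntervals(BaseModel):
    name: str


class NWBFileIntervals(BaseModel):
    # fixed group name, per `ifabsent: string(intervals)` / `equals_string`
    name: Literal["intervals"] = "intervals"
    epochs: Optional[TimeIntervals] = None
    trials: Optional[TimeIntervals] = None
    invalid_times: Optional[TimeIntervals] = None
    # `multivalued: true`: any number of extra interval tables
    time_intervals: Optional[List[TimeIntervals]] = None


intervals = NWBFileIntervals(trials=TimeIntervals(name="trials"))
```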
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.icephys.yaml
index 6dde850..d12f7e1 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.icephys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.icephys.yaml
@@ -46,6 +46,17 @@ classes:
         range: float32
         required: false
         multivalued: false
+      electrode:
+        name: electrode
+        annotations:
+          source_type:
+            tag: source_type
+            value: link
+        required: true
+        multivalued: false
+        any_of:
+        - range: IntracellularElectrode
+        - range: string
     tree_root: true
   PatchClampSeries__data:
     name: PatchClampSeries__data
@@ -63,8 +74,8 @@ classes:
           values are not necessarily stored in these units. To access the data in
           these units, multiply 'data' by 'conversion'.
         range: text
-      array:
-        name: array
+      value:
+        name: value
         array:
           dimensions:
           - alias: num_times
@@ -492,6 +503,17 @@ classes:
         range: text
         required: false
         multivalued: false
+      device:
+        name: device
+        annotations:
+          source_type:
+            tag: source_type
+            value: link
+        required: true
+        multivalued: false
+        any_of:
+        - range: Device
+        - range: string
     tree_root: true
   SweepTable:
     name: SweepTable
@@ -523,6 +545,9 @@ classes:
           named:
             tag: named
             value: true
+          source_type:
+            tag: source_type
+            value: neurodata_type_inc
         description: Index for series.
         range: VectorIndex
         required: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.image.yaml
index 4cab9c4..271f77d 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.image.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.image.yaml
@@ -132,8 +132,8 @@ classes:
           (and so there is a single element in the 'external_file' dataset), then
           this attribute should have value [0].
         range: int32
-      array:
-        name: array
+      value:
+        name: value
         array:
           dimensions:
           - alias: num_files
@@ -151,6 +151,17 @@ classes:
       name:
         name: name
         range: string
         required: true
+      masked_imageseries:
+        name: masked_imageseries
+        annotations:
+          source_type:
+            tag: source_type
+            value: link
+        required: true
+        multivalued: false
+        any_of:
+        - range: ImageSeries
+        - range: string
     tree_root: true
   OpticalSeries:
     name: OpticalSeries
@@ -217,4 +228,15 @@ classes:
         range: int32
         required: true
         multivalued: false
+      indexed_timeseries:
+        name: indexed_timeseries
+        annotations:
+          source_type:
+            tag: source_type
+            value: link
+        required: true
+        multivalued: false
+        any_of:
+        - range: ImageSeries
+        - range: string
     tree_root: true
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.misc.yaml
index e0b5dea..92edf08 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.misc.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.misc.yaml
@@ -73,8 +73,8 @@ classes:
           the units in 'feature_units'. The default value for this attribute is "see
           'feature_units'".
         range: text
-      array:
-        name: array
+      value:
+        name: value
         range: numeric
         any_of:
         - array:
@@ -159,6 +159,17 @@ classes:
         range: DecompositionSeries__bands
         required: true
         multivalued: false
+      source_timeseries:
+        name: source_timeseries
+        annotations:
+          source_type:
+            tag: source_type
+            value: link
+        required: false
+        multivalued: false
+        any_of:
+        - range: TimeSeries
+        - range: string
     tree_root: true
   DecompositionSeries__data:
     name: DecompositionSeries__data
@@ -176,8 +187,8 @@ classes:
           values are not necessarily stored in these units. To access the data in
           these units, multiply 'data' by 'conversion'.
         range: text
-      array:
-        name: array
+      value:
+        name: value
         array:
           dimensions:
           - alias: num_times
@@ -252,6 +263,9 @@ classes:
           named:
             tag: named
             value: true
+          source_type:
+            tag: source_type
+            value: neurodata_type_inc
         description: Index into the spike_times dataset.
         range: VectorIndex
         required: false
@@ -268,6 +282,9 @@ classes:
           named:
             tag: named
             value: true
+          source_type:
+            tag: source_type
+            value: neurodata_type_inc
         description: Index into the obs_intervals dataset.
         range: VectorIndex
         required: false
@@ -289,6 +306,9 @@ classes:
           named:
             tag: named
             value: true
+          source_type:
+            tag: source_type
+            value: neurodata_type_inc
         description: Index into electrodes.
         range: VectorIndex
         required: false
@@ -299,6 +319,9 @@ classes:
           named:
             tag: named
             value: true
+          source_type:
+            tag: source_type
+            value: neurodata_type_inc
         description: Electrode that each spike unit came from, specified using a
           DynamicTableRegion.
range: DynamicTableRegion required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ogen.yaml index 0ee27d7..4f62a54 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ogen.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ogen.yaml @@ -32,6 +32,17 @@ classes: range: numeric required: true multivalued: false + site: + name: site + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: OptogeneticStimulusSite + - range: string tree_root: true OptogeneticStimulusSite: name: OptogeneticStimulusSite @@ -62,4 +73,15 @@ classes: range: text required: true multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ophys.yaml index ca16020..d44aaa2 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ophys.yaml @@ -50,6 +50,17 @@ classes: dimensions: - alias: width_height exact_cardinality: 3 + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true RoiResponseSeries: name: RoiResponseSeries @@ -81,6 +92,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. range: DynamicTableRegion @@ -94,8 +108,8 @@ classes: for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -109,8 +123,8 @@ classes: for ROIs and for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -128,8 +142,8 @@ classes: is required and ROI names should remain consistent between them. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -229,6 +243,17 @@ classes: range: OpticalChannel required: true multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ImagingPlane__manifold: name: ImagingPlane__manifold @@ -258,8 +283,8 @@ classes: description: Base unit of measurement for working with the data. The default value is 'meters'. range: text - array: - name: array + value: + name: value range: float32 any_of: - array: @@ -291,8 +316,8 @@ classes: name: unit description: Measurement units for origin_coords. The default value is 'meters'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: x_y @@ -316,8 +341,8 @@ classes: name: unit description: Measurement units for grid_spacing. The default value is 'meters'. 
range: text - array: - name: array + value: + name: value array: dimensions: - alias: x_y @@ -353,8 +378,8 @@ classes: frame at each point in time is assumed to be 2-D (has only x & y dimensions).' is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.retinotopy.yaml index 900025d..f3a06cd 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.retinotopy.yaml @@ -32,8 +32,8 @@ classes: name: field_of_view description: Size of viewing area, in meters. range: float32 - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -54,8 +54,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -112,6 +112,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Phase response to stimulus on the first measured axis. range: AxisMap required: true @@ -122,6 +125,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. range: AxisMap @@ -133,6 +139,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Phase response to stimulus on the second measured axis. range: AxisMap required: true @@ -143,6 +152,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Power response to stimulus on the second measured axis. range: AxisMap required: false @@ -153,6 +165,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Sine of the angle between the direction of the gradient in axis_1 and axis_2. range: RetinotopyMap @@ -183,6 +198,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: 'Gray-scale anatomical image of cortical surface. Array structure: [rows][columns]' range: RetinotopyImage diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.base.yaml index 0b809d9..234bbc7 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.base.yaml @@ -42,8 +42,8 @@ classes: name: description description: Description of the image. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -202,8 +202,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value range: AnyType any_of: - array: @@ -268,8 +268,8 @@ classes: description: A collection of processed data. 
is_a: NWBContainer attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.behavior.yaml index 1b5ce9c..03ba313 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.behavior.yaml @@ -62,8 +62,8 @@ classes: value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -89,8 +89,8 @@ classes: events. BehavioralTimeSeries is for continuous data. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -103,8 +103,8 @@ classes: for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -117,8 +117,8 @@ classes: of BehavioralEpochs for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -130,8 +130,8 @@ classes: description: Eye-tracking data, representing pupil size. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -143,8 +143,8 @@ classes: description: Eye-tracking data, representing direction of gaze. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -160,8 +160,8 @@ classes: be radians or degrees. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -173,8 +173,8 @@ classes: description: Position data, whether along the x, x/y or x/y/z axis. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml index 280381d..2226676 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml @@ -52,6 +52,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -167,6 +170,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -211,6 +217,17 @@ classes: range: float64 required: true multivalued: false + source_electricalseries: + name: source_electricalseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ElectricalSeries + - range: string tree_root: true EventWaveform: name: EventWaveform @@ -219,8 +236,8 @@ classes: during experiment acquisition. 
is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -242,8 +259,8 @@ classes: the ElectricalSeries. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -257,8 +274,8 @@ classes: properties should be noted in the ElectricalSeries description or comments field. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -290,6 +307,17 @@ classes: range: ElectrodeGroup__position required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ElectrodeGroup__position: name: ElectrodeGroup__position @@ -364,6 +392,17 @@ classes: range: float32 required: true multivalued: false + clustering_interface: + name: clustering_interface + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Clustering + - range: string tree_root: true Clustering: name: Clustering diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.epoch.yaml index 2b2093c..1f086dc 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.epoch.yaml @@ -57,6 +57,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for tags. range: VectorIndex required: false @@ -73,6 +76,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for timeseries. range: VectorIndex required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml index 8506a84..504a763 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml @@ -192,14 +192,9 @@ classes: having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals + range: NWBFile__intervals + required: false + multivalued: false units: name: units description: Data about sorted spike units. @@ -683,3 +678,41 @@ classes: range: SweepTable required: false multivalued: false + NWBFile__intervals: + name: NWBFile__intervals + description: Experimental intervals, whether that be logically distinct sub-experiments + having a particular scientific goal, trials (see trials subgroup) during an + experiment, or epochs (see epochs subgroup) deriving from analysis of data. + attributes: + name: + name: name + ifabsent: string(intervals) + range: string + required: true + equals_string: intervals + epochs: + name: epochs + description: Divisions in time marking experimental stages or sub-divisions + of a single recording session. 
+ range: TimeIntervals + required: false + multivalued: false + trials: + name: trials + description: Repeated experimental events that have a logical grouping. + range: TimeIntervals + required: false + multivalued: false + invalid_times: + name: invalid_times + description: Time intervals that should be removed from analysis. + range: TimeIntervals + required: false + multivalued: false + time_intervals: + name: time_intervals + description: Optional additional table(s) for describing other experimental + time intervals. + range: TimeIntervals + required: false + multivalued: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.icephys.yaml index ab4153b..42fa48f 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.icephys.yaml @@ -46,6 +46,17 @@ classes: range: float32 required: false multivalued: false + electrode: + name: electrode + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: IntracellularElectrode + - range: string tree_root: true PatchClampSeries__data: name: PatchClampSeries__data @@ -63,8 +74,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -492,6 +503,17 @@ classes: range: text required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true SweepTable: name: SweepTable @@ -523,6 +545,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for series. range: VectorIndex required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.image.yaml index 495afe3..c74d614 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.image.yaml @@ -132,8 +132,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. 
range: int32 - array: - name: array + value: + name: value array: dimensions: - alias: num_files @@ -151,6 +151,17 @@ classes: name: name range: string required: true + masked_imageseries: + name: masked_imageseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true OpticalSeries: name: OpticalSeries @@ -217,4 +228,15 @@ classes: range: int32 required: true multivalued: false + indexed_timeseries: + name: indexed_timeseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.misc.yaml index 758c683..82a8100 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.misc.yaml @@ -73,8 +73,8 @@ classes: the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -159,6 +159,17 @@ classes: range: DecompositionSeries__bands required: true multivalued: false + source_timeseries: + name: source_timeseries + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: TimeSeries + - range: string tree_root: true DecompositionSeries__data: name: DecompositionSeries__data @@ -176,8 +187,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -252,6 +263,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the spike_times dataset. range: VectorIndex required: false @@ -268,6 +282,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the obs_intervals dataset. range: VectorIndex required: false @@ -289,6 +306,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into electrodes. range: VectorIndex required: false @@ -299,6 +319,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Electrode that each spike unit came from, specified using a DynamicTableRegion. 
range: DynamicTableRegion required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ogen.yaml index 2ffb3ae..61e640c 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ogen.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ogen.yaml @@ -32,6 +32,17 @@ classes: range: numeric required: true multivalued: false + site: + name: site + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: OptogeneticStimulusSite + - range: string tree_root: true OptogeneticStimulusSite: name: OptogeneticStimulusSite @@ -62,4 +73,15 @@ classes: range: text required: true multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ophys.yaml index cea4194..d5e6b39 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ophys.yaml @@ -50,6 +50,17 @@ classes: dimensions: - alias: width_height exact_cardinality: 3 + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true RoiResponseSeries: name: RoiResponseSeries @@ -81,6 +92,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. range: DynamicTableRegion @@ -94,8 +108,8 @@ classes: for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -109,8 +123,8 @@ classes: for ROIs and for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -128,8 +142,8 @@ classes: is required and ROI names should remain consistent between them. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -229,6 +243,17 @@ classes: range: OpticalChannel required: true multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ImagingPlane__manifold: name: ImagingPlane__manifold @@ -258,8 +283,8 @@ classes: description: Base unit of measurement for working with the data. The default value is 'meters'. range: text - array: - name: array + value: + name: value range: float32 any_of: - array: @@ -291,8 +316,8 @@ classes: name: unit description: Measurement units for origin_coords. The default value is 'meters'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: x_y @@ -316,8 +341,8 @@ classes: name: unit description: Measurement units for grid_spacing. The default value is 'meters'. 
range: text - array: - name: array + value: + name: value array: dimensions: - alias: x_y @@ -353,8 +378,8 @@ classes: frame at each point in time is assumed to be 2-D (has only x & y dimensions).' is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.retinotopy.yaml index bb1ba70..5a552ed 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.retinotopy.yaml @@ -32,8 +32,8 @@ classes: name: field_of_view description: Size of viewing area, in meters. range: float32 - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -54,8 +54,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -112,6 +112,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Phase response to stimulus on the first measured axis. range: AxisMap required: true @@ -122,6 +125,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. range: AxisMap @@ -133,6 +139,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Phase response to stimulus on the second measured axis. range: AxisMap required: true @@ -143,6 +152,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Power response to stimulus on the second measured axis. range: AxisMap required: false @@ -153,6 +165,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Sine of the angle between the direction of the gradient in axis_1 and axis_2. range: RetinotopyMap @@ -183,6 +198,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: 'Gray-scale anatomical image of cortical surface. Array structure: [rows][columns]' range: RetinotopyImage diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.base.yaml index 5ba1d46..bf4ee39 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.base.yaml @@ -42,8 +42,8 @@ classes: name: description description: Description of the image. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -202,8 +202,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value range: AnyType any_of: - array: @@ -268,8 +268,8 @@ classes: description: A collection of processed data. 
is_a: NWBContainer attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.behavior.yaml index b644d87..11f271b 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.behavior.yaml @@ -62,8 +62,8 @@ classes: value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -89,8 +89,8 @@ classes: events. BehavioralTimeSeries is for continuous data. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -103,8 +103,8 @@ classes: for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -117,8 +117,8 @@ classes: of BehavioralEpochs for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -130,8 +130,8 @@ classes: description: Eye-tracking data, representing pupil size. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -143,8 +143,8 @@ classes: description: Eye-tracking data, representing direction of gaze. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -160,8 +160,8 @@ classes: be radians or degrees. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -173,8 +173,8 @@ classes: description: Position data, whether along the x, x/y or x/y/z axis. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml index c96af97..83d97ad 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml @@ -52,6 +52,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -167,6 +170,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -211,6 +217,17 @@ classes: range: float64 required: true multivalued: false + source_electricalseries: + name: source_electricalseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ElectricalSeries + - range: string tree_root: true EventWaveform: name: EventWaveform @@ -219,8 +236,8 @@ classes: during experiment acquisition. 
is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -242,8 +259,8 @@ classes: the ElectricalSeries. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -257,8 +274,8 @@ classes: properties should be noted in the ElectricalSeries description or comments field. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -290,6 +307,17 @@ classes: range: ElectrodeGroup__position required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ElectrodeGroup__position: name: ElectrodeGroup__position @@ -364,6 +392,17 @@ classes: range: float32 required: true multivalued: false + clustering_interface: + name: clustering_interface + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Clustering + - range: string tree_root: true Clustering: name: Clustering diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.epoch.yaml index 47fa3e4..18c60a6 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.epoch.yaml @@ -57,6 +57,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for tags. range: VectorIndex required: false @@ -73,6 +76,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for timeseries. range: VectorIndex required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml index 4d2c3f8..3a250d5 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml @@ -192,14 +192,9 @@ classes: having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals + range: NWBFile__intervals + required: false + multivalued: false units: name: units description: Data about sorted spike units. @@ -683,3 +678,41 @@ classes: range: SweepTable required: false multivalued: false + NWBFile__intervals: + name: NWBFile__intervals + description: Experimental intervals, whether that be logically distinct sub-experiments + having a particular scientific goal, trials (see trials subgroup) during an + experiment, or epochs (see epochs subgroup) deriving from analysis of data. + attributes: + name: + name: name + ifabsent: string(intervals) + range: string + required: true + equals_string: intervals + epochs: + name: epochs + description: Divisions in time marking experimental stages or sub-divisions + of a single recording session. 
+ range: TimeIntervals + required: false + multivalued: false + trials: + name: trials + description: Repeated experimental events that have a logical grouping. + range: TimeIntervals + required: false + multivalued: false + invalid_times: + name: invalid_times + description: Time intervals that should be removed from analysis. + range: TimeIntervals + required: false + multivalued: false + time_intervals: + name: time_intervals + description: Optional additional table(s) for describing other experimental + time intervals. + range: TimeIntervals + required: false + multivalued: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.icephys.yaml index fb1361c..756714c 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.icephys.yaml @@ -46,6 +46,17 @@ classes: range: float32 required: false multivalued: false + electrode: + name: electrode + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: IntracellularElectrode + - range: string tree_root: true PatchClampSeries__data: name: PatchClampSeries__data @@ -63,8 +74,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -492,6 +503,17 @@ classes: range: text required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true SweepTable: name: SweepTable @@ -523,6 +545,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for series. range: VectorIndex required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.image.yaml index 71cf395..7cb22bc 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.image.yaml @@ -132,8 +132,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. 
range: int32 - array: - name: array + value: + name: value array: dimensions: - alias: num_files @@ -151,6 +151,17 @@ classes: name: name range: string required: true + masked_imageseries: + name: masked_imageseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true OpticalSeries: name: OpticalSeries @@ -236,4 +247,15 @@ classes: range: int32 required: true multivalued: false + indexed_timeseries: + name: indexed_timeseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.misc.yaml index 9f47099..79c8173 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.misc.yaml @@ -73,8 +73,8 @@ classes: the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -159,6 +159,17 @@ classes: range: DecompositionSeries__bands required: true multivalued: false + source_timeseries: + name: source_timeseries + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: TimeSeries + - range: string tree_root: true DecompositionSeries__data: name: DecompositionSeries__data @@ -176,8 +187,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -252,6 +263,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the spike_times dataset. range: VectorIndex required: false @@ -268,6 +282,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the obs_intervals dataset. range: VectorIndex required: false @@ -289,6 +306,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into electrodes. range: VectorIndex required: false @@ -299,6 +319,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Electrode that each spike unit came from, specified using a DynamicTableRegion. 
         range: DynamicTableRegion
         required: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ogen.yaml
index 08cb277..f281783 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ogen.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ogen.yaml
@@ -32,6 +32,17 @@ classes:
         range: numeric
         required: true
         multivalued: false
+      site:
+        name: site
+        annotations:
+          source_type:
+            tag: source_type
+            value: link
+        required: true
+        multivalued: false
+        any_of:
+        - range: OptogeneticStimulusSite
+        - range: string
     tree_root: true
   OptogeneticStimulusSite:
     name: OptogeneticStimulusSite
@@ -62,4 +73,15 @@ classes:
         range: text
         required: true
         multivalued: false
+      device:
+        name: device
+        annotations:
+          source_type:
+            tag: source_type
+            value: link
+        required: true
+        multivalued: false
+        any_of:
+        - range: Device
+        - range: string
     tree_root: true
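Throughout these diffs, container groups swap their `children` slot for a generic `value` slot that stays `multivalued`, `inlined`, and not `inlined_as_list`. Assuming LinkML's usual dict representation for inlined, non-list multivalued slots, a sketch of what that might look like in a generated model (hypothetical names, not the actual generated code):

```python
# Sketch only: a `value` slot holding a group's same-typed children by name.
from typing import Dict

from pydantic import BaseModel


class RoiResponseSeries(BaseModel):
    name: str


class DfOverF(BaseModel):
    # `multivalued: true, inlined: true, inlined_as_list: false` maps to a
    # dict keyed by the child's identifier (its name).
    value: Dict[str, RoiResponseSeries]


dff = DfOverF(value={"rois1": RoiResponseSeries(name="rois1")})
```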
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ophys.yaml
index 3eafce0..727c05e 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ophys.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ophys.yaml
@@ -50,6 +50,17 @@ classes:
           dimensions:
           - alias: width_height
             exact_cardinality: 3
+      imaging_plane:
+        name: imaging_plane
+        annotations:
+          source_type:
+            tag: source_type
+            value: link
+        required: true
+        multivalued: false
+        any_of:
+        - range: ImagingPlane
+        - range: string
     tree_root: true
   RoiResponseSeries:
     name: RoiResponseSeries
@@ -81,6 +92,9 @@ classes:
           named:
             tag: named
             value: true
+          source_type:
+            tag: source_type
+            value: neurodata_type_inc
         description: DynamicTableRegion referencing into an ROITable containing information
           on the ROIs stored in this timeseries.
         range: DynamicTableRegion
@@ -94,8 +108,8 @@ classes:
         for image planes).
     is_a: NWBDataInterface
     attributes:
-      children:
-        name: children
+      value:
+        name: value
         multivalued: true
         inlined: true
         inlined_as_list: false
@@ -109,8 +123,8 @@ classes:
         for ROIs and for image planes).
     is_a: NWBDataInterface
     attributes:
-      children:
-        name: children
+      value:
+        name: value
         multivalued: true
         inlined: true
         inlined_as_list: false
@@ -128,8 +142,8 @@ classes:
         is required and ROI names should remain consistent between them.
     is_a: NWBDataInterface
     attributes:
-      children:
-        name: children
+      value:
+        name: value
         multivalued: true
         inlined: true
         inlined_as_list: false
@@ -141,14 +155,222 @@ classes:
     description: An imaging plane and its metadata.
     is_a: NWBContainer
     attributes:
-      children:
-        name: children
+      name:
+        name: name
+        range: string
+        required: true
+      description:
+        name: description
+        description: Description of the imaging plane.
+        range: text
+        required: false
+        multivalued: false
+      excitation_lambda:
+        name: excitation_lambda
+        description: Excitation wavelength, in nm.
+        range: float32
+        required: true
+        multivalued: false
+      imaging_rate:
+        name: imaging_rate
+        description: Rate that images are acquired, in Hz.
+        range: float32
+        required: true
+        multivalued: false
+      indicator:
+        name: indicator
+        description: Calcium indicator.
+        range: text
+        required: true
+        multivalued: false
+      location:
+        name: location
+        description: Location of the imaging plane. Specify the area, layer, comments
+          on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use
+          standard atlas names for anatomical regions when possible.
+        range: text
+        required: true
+        multivalued: false
+      manifold:
+        name: manifold
+        description: DEPRECATED Physical position of each pixel. 'xyz' represents
+          the position of the pixel relative to the defined coordinate space. Deprecated
+          in favor of origin_coords and grid_spacing.
+        range: ImagingPlane__manifold
+        required: false
+        multivalued: false
+      origin_coords:
+        name: origin_coords
+        description: Physical location of the first element of the imaging plane (0,
+          0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for
+          what the physical location is relative to (e.g., bregma).
+        range: ImagingPlane__origin_coords
+        required: false
+        multivalued: false
+      grid_spacing:
+        name: grid_spacing
+        description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+          in the specified unit. Assumes imaging plane is a regular grid. See also
+          reference_frame to interpret the grid.
+        range: ImagingPlane__grid_spacing
+        required: false
+        multivalued: false
+      reference_frame:
+        name: reference_frame
+        description: Describes reference frame of origin_coords and grid_spacing.
+          For example, this can be a text description of the anatomical location and
+          orientation of the grid defined by origin_coords and grid_spacing or the
+          vectors needed to transform or rotate the grid to a common anatomical axis
+          (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and
+          grid_spacing. If origin_coords and grid_spacing are not present, then this
+          field is not required. For example, if the microscope takes 10 x 10 x 2
+          images, where the first value of the data matrix (index (0, 0, 0)) corresponds
+          to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is
+          0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means
+          more anterior, larger numbers in y means more rightward, and larger numbers
+          in z means more ventral, then enter the following -- origin_coords = (-1.2,
+          -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates
+          are relative to bregma. First dimension corresponds to anterior-posterior
+          axis (larger index = more anterior). Second dimension corresponds to medial-lateral
+          axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral
+          axis (larger index = more ventral)."
+        range: text
+        required: false
+        multivalued: false
+      optical_channel:
+        name: optical_channel
+        description: An optical channel used to record from an imaging plane.
+        range: OpticalChannel
+        required: true
         multivalued: true
-        inlined: true
-        inlined_as_list: false
+      device:
+        name: device
+        annotations:
+          source_type:
+            tag: source_type
+            value: link
+        required: true
+        multivalued: false
         any_of:
-        - range: NWBContainer
+        - range: Device
+        - range: string
     tree_root: true
+  ImagingPlane__manifold:
+    name: ImagingPlane__manifold
+    description: DEPRECATED Physical position of each pixel. 'xyz' represents the
+      position of the pixel relative to the defined coordinate space. Deprecated in
+      favor of origin_coords and grid_spacing.
+    attributes:
+      name:
+        name: name
+        ifabsent: string(manifold)
+        range: string
+        required: true
+        equals_string: manifold
+      conversion:
+        name: conversion
+        description: Scalar to multiply each element in data to convert it to the
+          specified 'unit'. If the data are stored in acquisition system units or
+          other units that require a conversion to be interpretable, multiply the
+          data by 'conversion' to convert the data to the specified 'unit'. e.g. if
+          the data acquisition system stores values in this object as pixels from
+          x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then
+          the 'conversion' multiplier to get from raw data acquisition pixel units
+          to meters is 2/1000.
+        range: float32
+      unit:
+        name: unit
+        description: Base unit of measurement for working with the data. The default
+          value is 'meters'.
+        range: text
+      value:
+        name: value
+        range: float32
+        any_of:
+        - array:
+            dimensions:
+            - alias: height
+            - alias: width
+            - alias: x_y_z
+              exact_cardinality: 3
+        - array:
+            dimensions:
+            - alias: height
+            - alias: width
+            - alias: depth
+            - alias: x_y_z
+              exact_cardinality: 3
+  ImagingPlane__origin_coords:
+    name: ImagingPlane__origin_coords
+    description: Physical location of the first element of the imaging plane (0, 0)
+      for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the
+      physical location is relative to (e.g., bregma).
+    attributes:
+      name:
+        name: name
+        ifabsent: string(origin_coords)
+        range: string
+        required: true
+        equals_string: origin_coords
+      unit:
+        name: unit
+        description: Measurement units for origin_coords. The default value is 'meters'.
+        range: text
+      value:
+        name: value
+        array:
+          dimensions:
+          - alias: x_y
+            exact_cardinality: 2
+          - alias: x_y_z
+            exact_cardinality: 3
+        range: float32
+  ImagingPlane__grid_spacing:
+    name: ImagingPlane__grid_spacing
+    description: Space between pixels in (x, y) or voxels in (x, y, z) directions,
+      in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame
+      to interpret the grid.
+    attributes:
+      name:
+        name: name
+        ifabsent: string(grid_spacing)
+        range: string
+        required: true
+        equals_string: grid_spacing
+      unit:
+        name: unit
+        description: Measurement units for grid_spacing. The default value is 'meters'.
+        range: text
+      value:
+        name: value
+        array:
+          dimensions:
+          - alias: x_y
+            exact_cardinality: 2
+          - alias: x_y_z
+            exact_cardinality: 3
+        range: float32
+  OpticalChannel:
+    name: OpticalChannel
+    description: An optical channel used to record from an imaging plane.
+    is_a: NWBContainer
+    attributes:
+      name:
+        name: name
+        range: string
+        required: true
+      description:
+        name: description
+        description: Description or other notes about the channel.
+        range: text
+        required: true
+        multivalued: false
+      emission_lambda:
+        name: emission_lambda
+        description: Emission wavelength for channel, in nm.
+        range: float32
+        required: true
+        multivalued: false
   MotionCorrection:
     name: MotionCorrection
     description: 'An image stack where all frames are shifted (registered) to a common
       coordinate system, to account for movement and drift between frames. Note: each
       frame at each point in time is assumed to be 2-D (has only x & y dimensions).'
     is_a: NWBDataInterface
     attributes:
-      children:
-        name: children
+      value:
+        name: value
         multivalued: true
         inlined: true
         inlined_as_list: false
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.retinotopy.yaml
index b4d0df4..2e53599 100644
--- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.retinotopy.yaml
+++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.retinotopy.yaml
@@ -114,8 +114,8 @@ classes:
         name: unit
         description: Unit that axis data is stored in (e.g., degrees).
         range: text
-      array:
-        name: array
+      value:
+        name: value
         array:
           dimensions:
           - alias: num_rows
@@ -145,8 +145,8 @@ classes:
         name: unit
         description: Unit that axis data is stored in (e.g., degrees).
range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -175,8 +175,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -206,8 +206,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -246,8 +246,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -273,8 +273,8 @@ classes: name: field_of_view description: Size of viewing area, in meters. range: float32 - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -309,8 +309,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.base.yaml index b9b7b76..8e458a3 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.base.yaml @@ -42,8 +42,8 @@ classes: name: description description: Description of the image. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -202,8 +202,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value range: AnyType any_of: - array: @@ -268,8 +268,8 @@ classes: description: A collection of processed data. is_a: NWBContainer attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.behavior.yaml index b2a980c..e6ae7ad 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.behavior.yaml @@ -62,8 +62,8 @@ classes: value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -89,8 +89,8 @@ classes: events. BehavioralTimeSeries is for continuous data. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -103,8 +103,8 @@ classes: for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -117,8 +117,8 @@ classes: of BehavioralEpochs for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -130,8 +130,8 @@ classes: description: Eye-tracking data, representing pupil size. 
is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -143,8 +143,8 @@ classes: description: Eye-tracking data, representing direction of gaze. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -160,8 +160,8 @@ classes: be radians or degrees. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -173,8 +173,8 @@ classes: description: Position data, whether along the x, x/y or x/y/z axis. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml index 9de48e5..8195921 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml @@ -52,6 +52,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -167,6 +170,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -211,6 +217,17 @@ classes: range: float64 required: true multivalued: false + source_electricalseries: + name: source_electricalseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ElectricalSeries + - range: string tree_root: true EventWaveform: name: EventWaveform @@ -219,8 +236,8 @@ classes: during experiment acquisition. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -242,8 +259,8 @@ classes: the ElectricalSeries. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -257,8 +274,8 @@ classes: properties should be noted in the ElectricalSeries description or comments field. 
is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -290,6 +307,17 @@ classes: range: ElectrodeGroup__position required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ElectrodeGroup__position: name: ElectrodeGroup__position @@ -364,6 +392,17 @@ classes: range: float32 required: true multivalued: false + clustering_interface: + name: clustering_interface + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Clustering + - range: string tree_root: true Clustering: name: Clustering diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.epoch.yaml index 51e899c..3365825 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.epoch.yaml @@ -57,6 +57,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for tags. range: VectorIndex required: false @@ -73,6 +76,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for timeseries. range: VectorIndex required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml index 885874a..1e98c12 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml @@ -206,14 +206,9 @@ classes: having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals + range: NWBFile__intervals + required: false + multivalued: false units: name: units description: Data about sorted spike units. @@ -636,6 +631,44 @@ classes: range: SweepTable required: false multivalued: false + NWBFile__intervals: + name: NWBFile__intervals + description: Experimental intervals, whether that be logically distinct sub-experiments + having a particular scientific goal, trials (see trials subgroup) during an + experiment, or epochs (see epochs subgroup) deriving from analysis of data. + attributes: + name: + name: name + ifabsent: string(intervals) + range: string + required: true + equals_string: intervals + epochs: + name: epochs + description: Divisions in time marking experimental stages or sub-divisions + of a single recording session. + range: TimeIntervals + required: false + multivalued: false + trials: + name: trials + description: Repeated experimental events that have a logical grouping. + range: TimeIntervals + required: false + multivalued: false + invalid_times: + name: invalid_times + description: Time intervals that should be removed from analysis. + range: TimeIntervals + required: false + multivalued: false + time_intervals: + name: time_intervals + description: Optional additional table(s) for describing other experimental + time intervals. 
+ range: TimeIntervals + required: false + multivalued: true LabMetaData: name: LabMetaData description: Lab-specific meta-data. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.icephys.yaml index 83cf963..fc1459d 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.icephys.yaml @@ -46,6 +46,17 @@ classes: range: float32 required: false multivalued: false + electrode: + name: electrode + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: IntracellularElectrode + - range: string tree_root: true PatchClampSeries__data: name: PatchClampSeries__data @@ -63,8 +74,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -492,6 +503,17 @@ classes: range: text required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true SweepTable: name: SweepTable @@ -523,6 +545,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for series. range: VectorIndex required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.image.yaml index dea4aa6..38a564e 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.image.yaml @@ -132,8 +132,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 - array: - name: array + value: + name: value array: dimensions: - alias: num_files @@ -151,6 +151,17 @@ classes: name: name range: string required: true + masked_imageseries: + name: masked_imageseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true OpticalSeries: name: OpticalSeries @@ -236,4 +247,15 @@ classes: range: int32 required: true multivalued: false + indexed_timeseries: + name: indexed_timeseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.misc.yaml index 01a3ce4..62fda1f 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.misc.yaml @@ -73,8 +73,8 @@ classes: the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". 
range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -159,6 +159,17 @@ classes: range: DecompositionSeries__bands required: true multivalued: false + source_timeseries: + name: source_timeseries + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: TimeSeries + - range: string tree_root: true DecompositionSeries__data: name: DecompositionSeries__data @@ -176,8 +187,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -252,6 +263,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the spike_times dataset. range: VectorIndex required: false @@ -268,6 +282,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the obs_intervals dataset. range: VectorIndex required: false @@ -289,6 +306,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into electrodes. range: VectorIndex required: false @@ -299,6 +319,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Electrode that each spike unit came from, specified using a DynamicTableRegion. range: DynamicTableRegion required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ogen.yaml index d158d51..ec81c05 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ogen.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ogen.yaml @@ -32,6 +32,17 @@ classes: range: numeric required: true multivalued: false + site: + name: site + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: OptogeneticStimulusSite + - range: string tree_root: true OptogeneticStimulusSite: name: OptogeneticStimulusSite @@ -62,4 +73,15 @@ classes: range: text required: true multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml index 547d0d5..c02f6d8 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml @@ -50,6 +50,17 @@ classes: dimensions: - alias: width_height exact_cardinality: 3 + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true RoiResponseSeries: name: RoiResponseSeries @@ -81,6 +92,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. range: DynamicTableRegion @@ -94,8 +108,8 @@ classes: for image planes). 
is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -109,8 +123,8 @@ classes: for ROIs and for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -128,8 +142,8 @@ classes: is required and ROI names should remain consistent between them. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -158,6 +172,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into pixel_mask. range: VectorIndex required: false @@ -176,6 +193,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into voxel_mask. range: VectorIndex required: false @@ -196,6 +216,17 @@ classes: inlined_as_list: false any_of: - range: ImageSeries + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true PlaneSegmentation__image_mask: name: PlaneSegmentation__image_mask @@ -282,14 +313,202 @@ classes: description: An imaging plane and its metadata. is_a: NWBContainer attributes: - children: - name: children + name: + name: name + range: string + required: true + description: + name: description + description: Description of the imaging plane. + range: text + required: false + multivalued: false + excitation_lambda: + name: excitation_lambda + description: Excitation wavelength, in nm. + range: float32 + required: true + multivalued: false + imaging_rate: + name: imaging_rate + description: Rate that images are acquired, in Hz. If the corresponding TimeSeries + is present, the rate should be stored there instead. + range: float32 + required: false + multivalued: false + indicator: + name: indicator + description: Calcium indicator. + range: text + required: true + multivalued: false + location: + name: location + description: Location of the imaging plane. Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use + standard atlas names for anatomical regions when possible. + range: text + required: true + multivalued: false + manifold: + name: manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents + the position of the pixel relative to the defined coordinate space. Deprecated + in favor of origin_coords and grid_spacing. + range: ImagingPlane__manifold + required: false + multivalued: false + origin_coords: + name: origin_coords + description: Physical location of the first element of the imaging plane (0, + 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for + what the physical location is relative to (e.g., bregma). + range: ImagingPlane__origin_coords + required: false + multivalued: false + grid_spacing: + name: grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. See also + reference_frame to interpret the grid. + range: ImagingPlane__grid_spacing + required: false + multivalued: false + reference_frame: + name: reference_frame + description: Describes reference frame of origin_coords and grid_spacing. 
+ For example, this can be a text description of the anatomical location and + orientation of the grid defined by origin_coords and grid_spacing or the + vectors needed to transform or rotate the grid to a common anatomical axis + (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and + grid_spacing. If origin_coords and grid_spacing are not present, then this + field is not required. For example, if the microscope takes 10 x 10 x 2 + images, where the first value of the data matrix (index (0, 0, 0)) corresponds + to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is + 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means + more anterior, larger numbers in y means more rightward, and larger numbers + in z means more ventral, then enter the following -- origin_coords = (-1.2, + -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates + are relative to bregma. First dimension corresponds to anterior-posterior + axis (larger index = more anterior). Second dimension corresponds to medial-lateral + axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral + axis (larger index = more ventral)." + range: text + required: false + multivalued: false + optical_channel: + name: optical_channel + description: An optical channel used to record from an imaging plane. + range: OpticalChannel + required: true multivalued: true - inlined: true - inlined_as_list: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false any_of: - - range: OpticalChannel + - range: Device + - range: string tree_root: true + ImagingPlane__manifold: + name: ImagingPlane__manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents the + position of the pixel relative to the defined coordinate space. Deprecated in + favor of origin_coords and grid_spacing. + attributes: + name: + name: name + ifabsent: string(manifold) + range: string + required: true + equals_string: manifold + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as pixels from + x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then + the 'conversion' multiplier to get from raw data acquisition pixel units + to meters is 2/1000. + range: float32 + unit: + name: unit + description: Base unit of measurement for working with the data. The default + value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: height + - alias: width + - alias: x_y_z + exact_cardinality: 3 + - array: + dimensions: + - alias: height + - alias: width + - alias: depth + - alias: x_y_z + exact_cardinality: 3 + ImagingPlane__origin_coords: + name: ImagingPlane__origin_coords + description: Physical location of the first element of the imaging plane (0, 0) + for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the + physical location is relative to (e.g., bregma). 
+ attributes: + name: + name: name + ifabsent: string(origin_coords) + range: string + required: true + equals_string: origin_coords + unit: + name: unit + description: Measurement units for origin_coords. The default value is 'meters'. + range: text + value: + name: value + array: + dimensions: + - alias: x_y + exact_cardinality: 2 + - alias: x_y_z + exact_cardinality: 3 + range: float32 + ImagingPlane__grid_spacing: + name: ImagingPlane__grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame + to interpret the grid. + attributes: + name: + name: name + ifabsent: string(grid_spacing) + range: string + required: true + equals_string: grid_spacing + unit: + name: unit + description: Measurement units for grid_spacing. The default value is 'meters'. + range: text + value: + name: value + array: + dimensions: + - alias: x_y + exact_cardinality: 2 + - alias: x_y_z + exact_cardinality: 3 + range: float32 OpticalChannel: name: OpticalChannel description: An optical channel used to record from an imaging plane. @@ -319,8 +538,8 @@ classes: frame at each point in time is assumed to be 2-D (has only x & y dimensions).' is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -349,4 +568,15 @@ classes: range: TimeSeries required: true multivalued: false + original: + name: original + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.retinotopy.yaml index 77858f0..7cf81d7 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.retinotopy.yaml @@ -114,8 +114,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -145,8 +145,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -175,8 +175,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -206,8 +206,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -246,8 +246,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -273,8 +273,8 @@ classes: name: field_of_view description: Size of viewing area, in meters. range: float32 - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -309,8 +309,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. 
range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.base.yaml index dbf0921..e3161a4 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.base.yaml @@ -42,8 +42,8 @@ classes: name: description description: Description of the image. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -202,8 +202,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value range: AnyType any_of: - array: @@ -268,8 +268,8 @@ classes: description: A collection of processed data. is_a: NWBContainer attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.behavior.yaml index 5ecb8fb..65f31ca 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.behavior.yaml @@ -62,8 +62,8 @@ classes: value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -89,8 +89,8 @@ classes: events. BehavioralTimeSeries is for continuous data. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -103,8 +103,8 @@ classes: for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -117,8 +117,8 @@ classes: of BehavioralEpochs for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -130,8 +130,8 @@ classes: description: Eye-tracking data, representing pupil size. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -143,8 +143,8 @@ classes: description: Eye-tracking data, representing direction of gaze. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -160,8 +160,8 @@ classes: be radians or degrees. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -173,8 +173,8 @@ classes: description: Position data, whether along the x, x/y or x/y/z axis. 
is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml index 73148dd..9d008a1 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml @@ -52,6 +52,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -167,6 +170,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -211,6 +217,17 @@ classes: range: float64 required: true multivalued: false + source_electricalseries: + name: source_electricalseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ElectricalSeries + - range: string tree_root: true EventWaveform: name: EventWaveform @@ -219,8 +236,8 @@ classes: during experiment acquisition. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -242,8 +259,8 @@ classes: the ElectricalSeries. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -257,8 +274,8 @@ classes: properties should be noted in the ElectricalSeries description or comments field. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -290,6 +307,17 @@ classes: range: ElectrodeGroup__position required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ElectrodeGroup__position: name: ElectrodeGroup__position @@ -364,6 +392,17 @@ classes: range: float32 required: true multivalued: false + clustering_interface: + name: clustering_interface + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Clustering + - range: string tree_root: true Clustering: name: Clustering diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.epoch.yaml index 13cf7c6..9a3bedf 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.epoch.yaml @@ -57,6 +57,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for tags. range: VectorIndex required: false @@ -73,6 +76,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for timeseries. 
range: VectorIndex required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml index a0aa5f7..efe69a8 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml @@ -206,14 +206,9 @@ classes: having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals + range: NWBFile__intervals + required: false + multivalued: false units: name: units description: Data about sorted spike units. @@ -636,6 +631,44 @@ classes: range: SweepTable required: false multivalued: false + NWBFile__intervals: + name: NWBFile__intervals + description: Experimental intervals, whether that be logically distinct sub-experiments + having a particular scientific goal, trials (see trials subgroup) during an + experiment, or epochs (see epochs subgroup) deriving from analysis of data. + attributes: + name: + name: name + ifabsent: string(intervals) + range: string + required: true + equals_string: intervals + epochs: + name: epochs + description: Divisions in time marking experimental stages or sub-divisions + of a single recording session. + range: TimeIntervals + required: false + multivalued: false + trials: + name: trials + description: Repeated experimental events that have a logical grouping. + range: TimeIntervals + required: false + multivalued: false + invalid_times: + name: invalid_times + description: Time intervals that should be removed from analysis. + range: TimeIntervals + required: false + multivalued: false + time_intervals: + name: time_intervals + description: Optional additional table(s) for describing other experimental + time intervals. + range: TimeIntervals + required: false + multivalued: true LabMetaData: name: LabMetaData description: Lab-specific meta-data. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.icephys.yaml index 015c804..4aa8c39 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.icephys.yaml @@ -46,6 +46,17 @@ classes: range: float32 required: false multivalued: false + electrode: + name: electrode + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: IntracellularElectrode + - range: string tree_root: true PatchClampSeries__data: name: PatchClampSeries__data @@ -63,8 +74,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -492,6 +503,17 @@ classes: range: text required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true SweepTable: name: SweepTable @@ -523,6 +545,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for series. 
range: VectorIndex required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.image.yaml index eaa5e52..6d564d7 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.image.yaml @@ -132,8 +132,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 - array: - name: array + value: + name: value array: dimensions: - alias: num_files @@ -151,6 +151,17 @@ classes: name: name range: string required: true + masked_imageseries: + name: masked_imageseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true OpticalSeries: name: OpticalSeries @@ -236,4 +247,15 @@ classes: range: int32 required: true multivalued: false + indexed_timeseries: + name: indexed_timeseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.misc.yaml index 115f5ac..bbfbc48 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.misc.yaml @@ -73,8 +73,8 @@ classes: the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -159,6 +159,17 @@ classes: range: DecompositionSeries__bands required: true multivalued: false + source_timeseries: + name: source_timeseries + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: TimeSeries + - range: string tree_root: true DecompositionSeries__data: name: DecompositionSeries__data @@ -176,8 +187,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -252,6 +263,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the spike_times dataset. range: VectorIndex required: false @@ -268,6 +282,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the obs_intervals dataset. range: VectorIndex required: false @@ -289,6 +306,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into electrodes. range: VectorIndex required: false @@ -299,6 +319,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Electrode that each spike unit came from, specified using a DynamicTableRegion. 
range: DynamicTableRegion required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ogen.yaml index 2be6f23..08d7a0c 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ogen.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ogen.yaml @@ -32,6 +32,17 @@ classes: range: numeric required: true multivalued: false + site: + name: site + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: OptogeneticStimulusSite + - range: string tree_root: true OptogeneticStimulusSite: name: OptogeneticStimulusSite @@ -62,4 +73,15 @@ classes: range: text required: true multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml index 902cace..6218538 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml @@ -50,6 +50,17 @@ classes: dimensions: - alias: width_height_depth exact_cardinality: 3 + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true RoiResponseSeries: name: RoiResponseSeries @@ -81,6 +92,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. range: DynamicTableRegion @@ -94,8 +108,8 @@ classes: for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -109,8 +123,8 @@ classes: for ROIs and for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -128,8 +142,8 @@ classes: is required and ROI names should remain consistent between them. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -158,6 +172,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into pixel_mask. range: VectorIndex required: false @@ -176,6 +193,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into voxel_mask. range: VectorIndex required: false @@ -196,6 +216,17 @@ classes: inlined_as_list: false any_of: - range: ImageSeries + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true PlaneSegmentation__image_mask: name: PlaneSegmentation__image_mask @@ -282,14 +313,208 @@ classes: description: An imaging plane and its metadata. 
is_a: NWBContainer attributes: - children: - name: children + name: + name: name + range: string + required: true + description: + name: description + description: Description of the imaging plane. + range: text + required: false + multivalued: false + excitation_lambda: + name: excitation_lambda + description: Excitation wavelength, in nm. + range: float32 + required: true + multivalued: false + imaging_rate: + name: imaging_rate + description: Rate that images are acquired, in Hz. If the corresponding TimeSeries + is present, the rate should be stored there instead. + range: float32 + required: false + multivalued: false + indicator: + name: indicator + description: Calcium indicator. + range: text + required: true + multivalued: false + location: + name: location + description: Location of the imaging plane. Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use + standard atlas names for anatomical regions when possible. + range: text + required: true + multivalued: false + manifold: + name: manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents + the position of the pixel relative to the defined coordinate space. Deprecated + in favor of origin_coords and grid_spacing. + range: ImagingPlane__manifold + required: false + multivalued: false + origin_coords: + name: origin_coords + description: Physical location of the first element of the imaging plane (0, + 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for + what the physical location is relative to (e.g., bregma). + range: ImagingPlane__origin_coords + required: false + multivalued: false + grid_spacing: + name: grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. See also + reference_frame to interpret the grid. + range: ImagingPlane__grid_spacing + required: false + multivalued: false + reference_frame: + name: reference_frame + description: Describes reference frame of origin_coords and grid_spacing. + For example, this can be a text description of the anatomical location and + orientation of the grid defined by origin_coords and grid_spacing or the + vectors needed to transform or rotate the grid to a common anatomical axis + (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and + grid_spacing. If origin_coords and grid_spacing are not present, then this + field is not required. For example, if the microscope takes 10 x 10 x 2 + images, where the first value of the data matrix (index (0, 0, 0)) corresponds + to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is + 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means + more anterior, larger numbers in y means more rightward, and larger numbers + in z means more ventral, then enter the following -- origin_coords = (-1.2, + -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates + are relative to bregma. First dimension corresponds to anterior-posterior + axis (larger index = more anterior). Second dimension corresponds to medial-lateral + axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral + axis (larger index = more ventral)." + range: text + required: false + multivalued: false + optical_channel: + name: optical_channel + description: An optical channel used to record from an imaging plane. 
+ range: OpticalChannel + required: true multivalued: true - inlined: true - inlined_as_list: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false any_of: - - range: OpticalChannel + - range: Device + - range: string tree_root: true + ImagingPlane__manifold: + name: ImagingPlane__manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents the + position of the pixel relative to the defined coordinate space. Deprecated in + favor of origin_coords and grid_spacing. + attributes: + name: + name: name + ifabsent: string(manifold) + range: string + required: true + equals_string: manifold + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as pixels from + x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then + the 'conversion' multiplier to get from raw data acquisition pixel units + to meters is 2/1000. + range: float32 + unit: + name: unit + description: Base unit of measurement for working with the data. The default + value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: height + - alias: width + - alias: x_y_z + exact_cardinality: 3 + - array: + dimensions: + - alias: height + - alias: width + - alias: depth + - alias: x_y_z + exact_cardinality: 3 + ImagingPlane__origin_coords: + name: ImagingPlane__origin_coords + description: Physical location of the first element of the imaging plane (0, 0) + for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the + physical location is relative to (e.g., bregma). + attributes: + name: + name: name + ifabsent: string(origin_coords) + range: string + required: true + equals_string: origin_coords + unit: + name: unit + description: Measurement units for origin_coords. The default value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: x_y + exact_cardinality: 2 + - array: + dimensions: + - alias: x_y_z + exact_cardinality: 3 + ImagingPlane__grid_spacing: + name: ImagingPlane__grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame + to interpret the grid. + attributes: + name: + name: name + ifabsent: string(grid_spacing) + range: string + required: true + equals_string: grid_spacing + unit: + name: unit + description: Measurement units for grid_spacing. The default value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: x_y + exact_cardinality: 2 + - array: + dimensions: + - alias: x_y_z + exact_cardinality: 3 OpticalChannel: name: OpticalChannel description: An optical channel used to record from an imaging plane. @@ -319,8 +544,8 @@ classes: frame at each point in time is assumed to be 2-D (has only x & y dimensions).' 
is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -349,4 +574,15 @@ classes: range: TimeSeries required: true multivalued: false + original: + name: original + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.retinotopy.yaml index c3acb3f..4fe5360 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.retinotopy.yaml @@ -114,8 +114,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -145,8 +145,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -175,8 +175,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -206,8 +206,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -246,8 +246,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -273,8 +273,8 @@ classes: name: field_of_view description: Size of viewing area, in meters. range: float32 - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -309,8 +309,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.base.yaml index 9eff269..8249ab1 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.base.yaml @@ -43,8 +43,8 @@ classes: name: description description: Description of the image. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -215,8 +215,8 @@ classes: the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. range: text - array: - name: array + value: + name: value range: AnyType any_of: - array: @@ -281,8 +281,8 @@ classes: description: A collection of processed data. is_a: NWBContainer attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml index 59a32d3..755fecd 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml @@ -62,8 +62,8 @@ classes: value is 'meters'. 
Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -89,8 +89,8 @@ classes: events. BehavioralTimeSeries is for continuous data. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -103,8 +103,8 @@ classes: for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -117,8 +117,8 @@ classes: of BehavioralEpochs for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -130,8 +130,8 @@ classes: description: Eye-tracking data, representing pupil size. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -143,8 +143,8 @@ classes: description: Eye-tracking data, representing direction of gaze. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -160,8 +160,8 @@ classes: be radians or degrees. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -173,8 +173,8 @@ classes: description: Position data, whether along the x, x/y or x/y/z axis. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml index 888caab..4b9767f 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml @@ -62,6 +62,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -177,6 +180,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -221,6 +227,17 @@ classes: range: float64 required: true multivalued: false + source_electricalseries: + name: source_electricalseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ElectricalSeries + - range: string tree_root: true EventWaveform: name: EventWaveform @@ -229,8 +246,8 @@ classes: during experiment acquisition. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -252,8 +269,8 @@ classes: the ElectricalSeries 'filtering' attribute. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -267,8 +284,8 @@ classes: properties should be noted in the ElectricalSeries 'filtering' attribute. 
is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -300,6 +317,17 @@ classes: range: ElectrodeGroup__position required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ElectrodeGroup__position: name: ElectrodeGroup__position @@ -374,6 +402,17 @@ classes: range: float32 required: true multivalued: false + clustering_interface: + name: clustering_interface + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Clustering + - range: string tree_root: true Clustering: name: Clustering diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml index 915eba7..6616781 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml @@ -57,6 +57,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for tags. range: VectorIndex required: false @@ -73,6 +76,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for timeseries. range: VectorIndex required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml index 5b00582..75dc8f9 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml @@ -206,14 +206,9 @@ classes: having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals + range: NWBFile__intervals + required: false + multivalued: false units: name: units description: Data about sorted spike units. @@ -637,6 +632,44 @@ classes: range: SweepTable required: false multivalued: false + NWBFile__intervals: + name: NWBFile__intervals + description: Experimental intervals, whether that be logically distinct sub-experiments + having a particular scientific goal, trials (see trials subgroup) during an + experiment, or epochs (see epochs subgroup) deriving from analysis of data. + attributes: + name: + name: name + ifabsent: string(intervals) + range: string + required: true + equals_string: intervals + epochs: + name: epochs + description: Divisions in time marking experimental stages or sub-divisions + of a single recording session. + range: TimeIntervals + required: false + multivalued: false + trials: + name: trials + description: Repeated experimental events that have a logical grouping. + range: TimeIntervals + required: false + multivalued: false + invalid_times: + name: invalid_times + description: Time intervals that should be removed from analysis. + range: TimeIntervals + required: false + multivalued: false + time_intervals: + name: time_intervals + description: Optional additional table(s) for describing other experimental + time intervals. 
+ range: TimeIntervals + required: false + multivalued: true LabMetaData: name: LabMetaData description: Lab-specific meta-data. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml index d100eb5..9e14aec 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml @@ -46,6 +46,17 @@ classes: range: float32 required: false multivalued: false + electrode: + name: electrode + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: IntracellularElectrode + - range: string tree_root: true PatchClampSeries__data: name: PatchClampSeries__data @@ -63,8 +74,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -497,6 +508,17 @@ classes: range: text required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true SweepTable: name: SweepTable @@ -528,6 +550,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for series. range: VectorIndex required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.image.yaml index 52b587d..0b93830 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.image.yaml @@ -104,6 +104,17 @@ classes: range: text required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ImageSeries__external_file: name: ImageSeries__external_file @@ -133,8 +144,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 - array: - name: array + value: + name: value array: dimensions: - alias: num_files @@ -152,6 +163,17 @@ classes: name: name range: string required: true + masked_imageseries: + name: masked_imageseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true OpticalSeries: name: OpticalSeries @@ -237,4 +259,15 @@ classes: range: int32 required: true multivalued: false + indexed_timeseries: + name: indexed_timeseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.misc.yaml index 1e5e813..e1d24e2 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.misc.yaml @@ -73,8 +73,8 @@ classes: the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". 
range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -158,6 +158,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the channels that this decomposition series was generated from. range: DynamicTableRegion @@ -170,6 +173,17 @@ classes: range: DecompositionSeries__bands required: true multivalued: false + source_timeseries: + name: source_timeseries + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: TimeSeries + - range: string tree_root: true DecompositionSeries__data: name: DecompositionSeries__data @@ -187,8 +201,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -263,6 +277,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the spike_times dataset. range: VectorIndex required: false @@ -279,6 +296,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the obs_intervals dataset. range: VectorIndex required: false @@ -300,6 +320,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into electrodes. range: VectorIndex required: false @@ -310,6 +333,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Electrode that each spike unit came from, specified using a DynamicTableRegion. range: DynamicTableRegion required: false @@ -390,6 +416,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. range: VectorIndex @@ -401,6 +430,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. 
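The hunks above repeatedly pair the existing `named` annotation with a new `source_type: neurodata_type_inc` on `VectorIndex` and `DynamicTableRegion` slots. A minimal sketch of what `named: true` plausibly means downstream, assuming hypothetical class names (`Units`, `VectorIndex`) and a validator-based implementation that is not necessarily the package's own:

```python
from typing import Optional
from pydantic import BaseModel, model_validator

class VectorIndex(BaseModel):
    name: Optional[str] = None
    description: str

class Units(BaseModel):
    spike_times_index: Optional[VectorIndex] = None

    @model_validator(mode="after")
    def fill_names(self) -> "Units":
        # hypothetical: propagate the slot name to an otherwise-unnamed child,
        # which is what "named: true" seems to ask for
        if self.spike_times_index is not None and self.spike_times_index.name is None:
            self.spike_times_index.name = "spike_times_index"
        return self
```

Under that reading, `source_type` plausibly just records where the slot came from in the NWB source (`neurodata_type_inc` vs. `link`) so later build stages can treat the two cases differently; that interpretation is an assumption here.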
range: VectorIndex diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml index 48992da..3148b98 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ogen.yaml @@ -32,6 +32,17 @@ classes: range: numeric required: true multivalued: false + site: + name: site + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: OptogeneticStimulusSite + - range: string tree_root: true OptogeneticStimulusSite: name: OptogeneticStimulusSite @@ -62,4 +73,15 @@ classes: range: text required: true multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml index 53f031e..15cb79a 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml @@ -50,6 +50,17 @@ classes: dimensions: - alias: width_height_depth exact_cardinality: 3 + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true RoiResponseSeries: name: RoiResponseSeries @@ -81,6 +92,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. range: DynamicTableRegion @@ -94,8 +108,8 @@ classes: for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -109,8 +123,8 @@ classes: for ROIs and for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -128,8 +142,8 @@ classes: is required and ROI names should remain consistent between them. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -158,6 +172,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into pixel_mask. range: VectorIndex required: false @@ -176,6 +193,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into voxel_mask. range: VectorIndex required: false @@ -196,6 +216,17 @@ classes: inlined_as_list: false any_of: - range: ImageSeries + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true PlaneSegmentation__image_mask: name: PlaneSegmentation__image_mask @@ -282,14 +313,208 @@ classes: description: An imaging plane and its metadata. is_a: NWBContainer attributes: - children: - name: children + name: + name: name + range: string + required: true + description: + name: description + description: Description of the imaging plane. 
+ range: text + required: false + multivalued: false + excitation_lambda: + name: excitation_lambda + description: Excitation wavelength, in nm. + range: float32 + required: true + multivalued: false + imaging_rate: + name: imaging_rate + description: Rate that images are acquired, in Hz. If the corresponding TimeSeries + is present, the rate should be stored there instead. + range: float32 + required: false + multivalued: false + indicator: + name: indicator + description: Calcium indicator. + range: text + required: true + multivalued: false + location: + name: location + description: Location of the imaging plane. Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use + standard atlas names for anatomical regions when possible. + range: text + required: true + multivalued: false + manifold: + name: manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents + the position of the pixel relative to the defined coordinate space. Deprecated + in favor of origin_coords and grid_spacing. + range: ImagingPlane__manifold + required: false + multivalued: false + origin_coords: + name: origin_coords + description: Physical location of the first element of the imaging plane (0, + 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for + what the physical location is relative to (e.g., bregma). + range: ImagingPlane__origin_coords + required: false + multivalued: false + grid_spacing: + name: grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. See also + reference_frame to interpret the grid. + range: ImagingPlane__grid_spacing + required: false + multivalued: false + reference_frame: + name: reference_frame + description: Describes reference frame of origin_coords and grid_spacing. + For example, this can be a text description of the anatomical location and + orientation of the grid defined by origin_coords and grid_spacing or the + vectors needed to transform or rotate the grid to a common anatomical axis + (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and + grid_spacing. If origin_coords and grid_spacing are not present, then this + field is not required. For example, if the microscope takes 10 x 10 x 2 + images, where the first value of the data matrix (index (0, 0, 0)) corresponds + to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is + 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means + more anterior, larger numbers in y means more rightward, and larger numbers + in z means more ventral, then enter the following -- origin_coords = (-1.2, + -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates + are relative to bregma. First dimension corresponds to anterior-posterior + axis (larger index = more anterior). Second dimension corresponds to medial-lateral + axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral + axis (larger index = more ventral)." + range: text + required: false + multivalued: false + optical_channel: + name: optical_channel + description: An optical channel used to record from an imaging plane. 
+ range: OpticalChannel + required: true multivalued: true - inlined: true - inlined_as_list: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false any_of: - - range: OpticalChannel + - range: Device + - range: string tree_root: true + ImagingPlane__manifold: + name: ImagingPlane__manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents the + position of the pixel relative to the defined coordinate space. Deprecated in + favor of origin_coords and grid_spacing. + attributes: + name: + name: name + ifabsent: string(manifold) + range: string + required: true + equals_string: manifold + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as pixels from + x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then + the 'conversion' multiplier to get from raw data acquisition pixel units + to meters is 2/1000. + range: float32 + unit: + name: unit + description: Base unit of measurement for working with the data. The default + value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: height + - alias: width + - alias: x_y_z + exact_cardinality: 3 + - array: + dimensions: + - alias: height + - alias: width + - alias: depth + - alias: x_y_z + exact_cardinality: 3 + ImagingPlane__origin_coords: + name: ImagingPlane__origin_coords + description: Physical location of the first element of the imaging plane (0, 0) + for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the + physical location is relative to (e.g., bregma). + attributes: + name: + name: name + ifabsent: string(origin_coords) + range: string + required: true + equals_string: origin_coords + unit: + name: unit + description: Measurement units for origin_coords. The default value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: x_y + exact_cardinality: 2 + - array: + dimensions: + - alias: x_y_z + exact_cardinality: 3 + ImagingPlane__grid_spacing: + name: ImagingPlane__grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame + to interpret the grid. + attributes: + name: + name: name + ifabsent: string(grid_spacing) + range: string + required: true + equals_string: grid_spacing + unit: + name: unit + description: Measurement units for grid_spacing. The default value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: x_y + exact_cardinality: 2 + - array: + dimensions: + - alias: x_y_z + exact_cardinality: 3 OpticalChannel: name: OpticalChannel description: An optical channel used to record from an imaging plane. @@ -319,8 +544,8 @@ classes: frame at each point in time is assumed to be 2-D (has only x & y dimensions).' 
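The new `ImagingPlane__manifold`, `ImagingPlane__origin_coords`, and `ImagingPlane__grid_spacing` classes above express their datasets as a `value` slot with an `any_of` over two array shapes. A sketch of how such a field might render, assuming the numpydantic `NDArray`/`Shape` types the generator already imports; the class name and dtype are illustrative, not confirmed generator output:

```python
from typing import Optional, Union
from numpydantic import NDArray, Shape
from pydantic import BaseModel

class ImagingPlaneManifold(BaseModel):
    name: str = "manifold"
    conversion: Optional[float] = None
    unit: Optional[str] = None
    # the any_of of two shapes becomes a union of two typed arrays
    value: Optional[
        Union[
            NDArray[Shape["* height, * width, 3 x_y_z"], float],
            NDArray[Shape["* height, * width, * depth, 3 x_y_z"], float],
        ]
    ] = None
```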
is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -349,4 +574,15 @@ classes: range: TimeSeries required: true multivalued: false + original: + name: original + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml index 35c4f49..98473cb 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml @@ -114,8 +114,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -145,8 +145,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -175,8 +175,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -206,8 +206,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -246,8 +246,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -273,8 +273,8 @@ classes: name: field_of_view description: Size of viewing area, in meters. range: float32 - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -309,8 +309,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml index 8d225c1..77f05be 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml @@ -77,8 +77,8 @@ classes: name: description description: Description of the image. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -249,8 +249,8 @@ classes: the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. range: text - array: - name: array + value: + name: value range: AnyType any_of: - array: @@ -315,8 +315,8 @@ classes: description: A collection of processed data. is_a: NWBContainer attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml index 27895f5..1ed3fa3 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml @@ -62,8 +62,8 @@ classes: value is 'meters'. 
Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -89,8 +89,8 @@ classes: events. BehavioralTimeSeries is for continuous data. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -103,8 +103,8 @@ classes: for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -117,8 +117,8 @@ classes: of BehavioralEpochs for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -130,8 +130,8 @@ classes: description: Eye-tracking data, representing pupil size. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -143,8 +143,8 @@ classes: description: Eye-tracking data, representing direction of gaze. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -160,8 +160,8 @@ classes: be radians or degrees. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -173,8 +173,8 @@ classes: description: Position data, whether along the x, x/y or x/y/z axis. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml index 00ebe6f..43a8eb9 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml @@ -62,6 +62,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -177,6 +180,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -221,6 +227,17 @@ classes: range: float64 required: true multivalued: false + source_electricalseries: + name: source_electricalseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ElectricalSeries + - range: string tree_root: true EventWaveform: name: EventWaveform @@ -229,8 +246,8 @@ classes: during experiment acquisition. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -252,8 +269,8 @@ classes: the ElectricalSeries 'filtering' attribute. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -267,8 +284,8 @@ classes: properties should be noted in the ElectricalSeries 'filtering' attribute. 
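Across the behavior and ecephys modules above, the catch-all `children` slot is renamed to `value` while keeping `multivalued`, `inlined`, and `inlined_as_list: false`. One way such a slot could surface in pydantic, shown here as a hedged sketch (keying the mapping by name is an assumption, not confirmed output):

```python
from typing import Dict, Optional
from pydantic import BaseModel

class ElectricalSeries(BaseModel):
    name: str

class LFP(BaseModel):
    # hypothetical rendering of "multivalued, inlined, not as list":
    # contained objects addressed by their name, as in an HDMF group
    value: Optional[Dict[str, ElectricalSeries]] = None

lfp = LFP(value={"probe_0": ElectricalSeries(name="probe_0")})
```

Keying on `name` would match how HDMF groups address their contained objects, which is presumably the motivation for moving away from a generic `children` list.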
is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -300,6 +317,17 @@ classes: range: ElectrodeGroup__position required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ElectrodeGroup__position: name: ElectrodeGroup__position @@ -374,6 +402,17 @@ classes: range: float32 required: true multivalued: false + clustering_interface: + name: clustering_interface + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Clustering + - range: string tree_root: true Clustering: name: Clustering diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml index 19ce0d0..512513e 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml @@ -57,6 +57,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for tags. range: VectorIndex required: false @@ -73,6 +76,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for timeseries. range: VectorIndex required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml index 7aade20..a1bd2ec 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml @@ -206,14 +206,9 @@ classes: having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals + range: NWBFile__intervals + required: false + multivalued: false units: name: units description: Data about sorted spike units. @@ -691,6 +686,44 @@ classes: range: ExperimentalConditionsTable required: false multivalued: false + NWBFile__intervals: + name: NWBFile__intervals + description: Experimental intervals, whether that be logically distinct sub-experiments + having a particular scientific goal, trials (see trials subgroup) during an + experiment, or epochs (see epochs subgroup) deriving from analysis of data. + attributes: + name: + name: name + ifabsent: string(intervals) + range: string + required: true + equals_string: intervals + epochs: + name: epochs + description: Divisions in time marking experimental stages or sub-divisions + of a single recording session. + range: TimeIntervals + required: false + multivalued: false + trials: + name: trials + description: Repeated experimental events that have a logical grouping. + range: TimeIntervals + required: false + multivalued: false + invalid_times: + name: invalid_times + description: Time intervals that should be removed from analysis. + range: TimeIntervals + required: false + multivalued: false + time_intervals: + name: time_intervals + description: Optional additional table(s) for describing other experimental + time intervals. 
+ range: TimeIntervals + required: false + multivalued: true LabMetaData: name: LabMetaData description: Lab-specific meta-data. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml index 2150103..89b6b6d 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml @@ -46,6 +46,17 @@ classes: range: float32 required: false multivalued: false + electrode: + name: electrode + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: IntracellularElectrode + - range: string tree_root: true PatchClampSeries__data: name: PatchClampSeries__data @@ -63,8 +74,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -497,6 +508,17 @@ classes: range: text required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true SweepTable: name: SweepTable @@ -531,6 +553,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for series. range: VectorIndex required: true @@ -575,6 +600,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Column storing the reference to the recorded stimulus for the recording (rows). range: TimeSeriesReferenceVectorData @@ -600,6 +628,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Column storing the reference to the recorded response for the recording (rows) range: TimeSeriesReferenceVectorData @@ -677,6 +708,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the recordings column. range: VectorIndex required: true @@ -727,6 +761,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the simultaneous_recordings column. range: VectorIndex required: true @@ -786,6 +823,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the sequential_recordings column. range: VectorIndex required: true @@ -833,6 +873,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the repetitions column. 
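The `NWBFile__intervals` class introduced above replaces the old anonymous group (an `any_of` repeating `TimeIntervals` four times) with explicitly named slots. A direct sketch of the corresponding model, with `Literal` standing in for the `ifabsent`/`equals_string` pair; any detail beyond what the schema states is illustrative:

```python
from typing import List, Literal, Optional
from pydantic import BaseModel

class TimeIntervals(BaseModel):
    name: str

class NWBFileIntervals(BaseModel):
    # fixed group name, per ifabsent: string(intervals) / equals_string
    name: Literal["intervals"] = "intervals"
    epochs: Optional[TimeIntervals] = None
    trials: Optional[TimeIntervals] = None
    invalid_times: Optional[TimeIntervals] = None
    # the open-ended "additional table(s)" slot stays multivalued
    time_intervals: Optional[List[TimeIntervals]] = None
```

With this shape, `nwbfile.intervals.trials` is addressable by attribute instead of by scanning a homogeneous list for the right member.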
range: VectorIndex required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.image.yaml index 71e1c2a..dd88ab9 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.image.yaml @@ -105,6 +105,17 @@ classes: range: text required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ImageSeries__external_file: name: ImageSeries__external_file @@ -134,8 +145,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 - array: - name: array + value: + name: value array: dimensions: - alias: num_files @@ -153,6 +164,17 @@ classes: name: name range: string required: true + masked_imageseries: + name: masked_imageseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true OpticalSeries: name: OpticalSeries @@ -238,4 +260,15 @@ classes: range: int32 required: true multivalued: false + indexed_timeseries: + name: indexed_timeseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.misc.yaml index 5910e70..d416ce7 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.misc.yaml @@ -73,8 +73,8 @@ classes: the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -158,6 +158,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the channels that this decomposition series was generated from. range: DynamicTableRegion @@ -170,6 +173,17 @@ classes: range: DecompositionSeries__bands required: true multivalued: false + source_timeseries: + name: source_timeseries + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: TimeSeries + - range: string tree_root: true DecompositionSeries__data: name: DecompositionSeries__data @@ -187,8 +201,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -263,6 +277,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the spike_times dataset. range: VectorIndex required: false @@ -279,6 +296,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the obs_intervals dataset. range: VectorIndex required: false @@ -300,6 +320,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into electrodes. 
range: VectorIndex required: false @@ -310,6 +333,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Electrode that each spike unit came from, specified using a DynamicTableRegion. range: DynamicTableRegion required: false @@ -390,6 +416,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. range: VectorIndex @@ -401,6 +430,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. range: VectorIndex diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml index 4786e5d..1add778 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ogen.yaml @@ -32,6 +32,17 @@ classes: range: numeric required: true multivalued: false + site: + name: site + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: OptogeneticStimulusSite + - range: string tree_root: true OptogeneticStimulusSite: name: OptogeneticStimulusSite @@ -62,4 +73,15 @@ classes: range: text required: true multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml index b76d14a..0e56b23 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml @@ -50,6 +50,17 @@ classes: dimensions: - alias: width_height_depth exact_cardinality: 3 + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true RoiResponseSeries: name: RoiResponseSeries @@ -81,6 +92,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. range: DynamicTableRegion @@ -94,8 +108,8 @@ classes: for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -109,8 +123,8 @@ classes: for ROIs and for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -128,8 +142,8 @@ classes: is required and ROI names should remain consistent between them. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -158,6 +172,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into pixel_mask. 
range: VectorIndex required: false @@ -176,6 +193,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into voxel_mask. range: VectorIndex required: false @@ -196,6 +216,17 @@ classes: inlined_as_list: false any_of: - range: ImageSeries + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true PlaneSegmentation__image_mask: name: PlaneSegmentation__image_mask @@ -282,14 +313,208 @@ classes: description: An imaging plane and its metadata. is_a: NWBContainer attributes: - children: - name: children + name: + name: name + range: string + required: true + description: + name: description + description: Description of the imaging plane. + range: text + required: false + multivalued: false + excitation_lambda: + name: excitation_lambda + description: Excitation wavelength, in nm. + range: float32 + required: true + multivalued: false + imaging_rate: + name: imaging_rate + description: Rate that images are acquired, in Hz. If the corresponding TimeSeries + is present, the rate should be stored there instead. + range: float32 + required: false + multivalued: false + indicator: + name: indicator + description: Calcium indicator. + range: text + required: true + multivalued: false + location: + name: location + description: Location of the imaging plane. Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use + standard atlas names for anatomical regions when possible. + range: text + required: true + multivalued: false + manifold: + name: manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents + the position of the pixel relative to the defined coordinate space. Deprecated + in favor of origin_coords and grid_spacing. + range: ImagingPlane__manifold + required: false + multivalued: false + origin_coords: + name: origin_coords + description: Physical location of the first element of the imaging plane (0, + 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for + what the physical location is relative to (e.g., bregma). + range: ImagingPlane__origin_coords + required: false + multivalued: false + grid_spacing: + name: grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. See also + reference_frame to interpret the grid. + range: ImagingPlane__grid_spacing + required: false + multivalued: false + reference_frame: + name: reference_frame + description: Describes reference frame of origin_coords and grid_spacing. + For example, this can be a text description of the anatomical location and + orientation of the grid defined by origin_coords and grid_spacing or the + vectors needed to transform or rotate the grid to a common anatomical axis + (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and + grid_spacing. If origin_coords and grid_spacing are not present, then this + field is not required. 
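The link slots above (`imaging_plane`, `device`, `source_timeseries`, and friends) all share one shape: a `source_type: link` annotation plus `any_of` over the target type and `string`. Read literally, that renders as a union of the object itself and a string reference; a minimal sketch with hypothetical classes:

```python
from typing import Union
from pydantic import BaseModel

class ImagingPlane(BaseModel):
    name: str

class TwoPhotonSeries(BaseModel):
    name: str
    # a link is either the inlined target object or a string reference,
    # mirroring any_of: [range: ImagingPlane, range: string]
    imaging_plane: Union[ImagingPlane, str]
```

The `string` arm presumably carries something like an HDF5 object path when the target is not inlined; that interpretation is an assumption here.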
For example, if the microscope takes 10 x 10 x 2 + images, where the first value of the data matrix (index (0, 0, 0)) corresponds + to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is + 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means + more anterior, larger numbers in y means more rightward, and larger numbers + in z means more ventral, then enter the following -- origin_coords = (-1.2, + -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates + are relative to bregma. First dimension corresponds to anterior-posterior + axis (larger index = more anterior). Second dimension corresponds to medial-lateral + axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral + axis (larger index = more ventral)." + range: text + required: false + multivalued: false + optical_channel: + name: optical_channel + description: An optical channel used to record from an imaging plane. + range: OpticalChannel + required: true multivalued: true - inlined: true - inlined_as_list: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false any_of: - - range: OpticalChannel + - range: Device + - range: string tree_root: true + ImagingPlane__manifold: + name: ImagingPlane__manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents the + position of the pixel relative to the defined coordinate space. Deprecated in + favor of origin_coords and grid_spacing. + attributes: + name: + name: name + ifabsent: string(manifold) + range: string + required: true + equals_string: manifold + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as pixels from + x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then + the 'conversion' multiplier to get from raw data acquisition pixel units + to meters is 2/1000. + range: float32 + unit: + name: unit + description: Base unit of measurement for working with the data. The default + value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: height + - alias: width + - alias: x_y_z + exact_cardinality: 3 + - array: + dimensions: + - alias: height + - alias: width + - alias: depth + - alias: x_y_z + exact_cardinality: 3 + ImagingPlane__origin_coords: + name: ImagingPlane__origin_coords + description: Physical location of the first element of the imaging plane (0, 0) + for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the + physical location is relative to (e.g., bregma). + attributes: + name: + name: name + ifabsent: string(origin_coords) + range: string + required: true + equals_string: origin_coords + unit: + name: unit + description: Measurement units for origin_coords. The default value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: x_y + exact_cardinality: 2 + - array: + dimensions: + - alias: x_y_z + exact_cardinality: 3 + ImagingPlane__grid_spacing: + name: ImagingPlane__grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. 
Assumes imaging plane is a regular grid. See also reference_frame + to interpret the grid. + attributes: + name: + name: name + ifabsent: string(grid_spacing) + range: string + required: true + equals_string: grid_spacing + unit: + name: unit + description: Measurement units for grid_spacing. The default value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: x_y + exact_cardinality: 2 + - array: + dimensions: + - alias: x_y_z + exact_cardinality: 3 OpticalChannel: name: OpticalChannel description: An optical channel used to record from an imaging plane. @@ -319,8 +544,8 @@ classes: frame at each point in time is assumed to be 2-D (has only x & y dimensions).' is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -349,4 +574,15 @@ classes: range: TimeSeries required: true multivalued: false + original: + name: original + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml index 4a62dbb..d6318e1 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml @@ -114,8 +114,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -145,8 +145,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -175,8 +175,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -206,8 +206,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -246,8 +246,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -273,8 +273,8 @@ classes: name: field_of_view description: Size of viewing area, in meters. range: float32 - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -309,8 +309,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml index ae17645..4204fe9 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml @@ -77,8 +77,8 @@ classes: name: description description: Description of the image. 
range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -107,8 +107,12 @@ classes: name: name range: string required: true - image: - name: image + value: + name: value + annotations: + source_type: + tag: source_type + value: reference description: Ordered dataset of references to Image objects. range: Image required: true @@ -273,8 +277,8 @@ classes: the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. range: text - array: - name: array + value: + name: value range: AnyType any_of: - array: @@ -339,8 +343,8 @@ classes: description: A collection of processed data. is_a: NWBContainer attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -376,6 +380,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml index 226b5e1..8a994a2 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml @@ -63,8 +63,8 @@ classes: units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -101,8 +101,8 @@ classes: events. BehavioralTimeSeries is for continuous data. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -115,8 +115,8 @@ classes: for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -129,8 +129,8 @@ classes: of BehavioralEpochs for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -142,8 +142,8 @@ classes: description: Eye-tracking data, representing pupil size. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -155,8 +155,8 @@ classes: description: Eye-tracking data, representing direction of gaze. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -172,8 +172,8 @@ classes: be radians or degrees. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -185,8 +185,8 @@ classes: description: Position data, whether along the x, x/y or x/y/z axis. 
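In `core.nwb.base` above, the `Images.image` slot becomes `value` with a `source_type: reference` annotation, described as an ordered dataset of references to `Image` objects. Since the slot stays required and multivalued, a plain ordered list is the simplest reading; again a sketch, not confirmed generator output:

```python
from typing import List
from pydantic import BaseModel

class Image(BaseModel):
    name: str

class Images(BaseModel):
    name: str
    # hypothetical rendering of the required, multivalued reference slot:
    # order is preserved, matching "ordered dataset of references"
    value: List[Image]
```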
is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml index 2172085..bb13169 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml @@ -62,6 +62,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -177,6 +180,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -221,6 +227,17 @@ classes: range: float64 required: true multivalued: false + source_electricalseries: + name: source_electricalseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ElectricalSeries + - range: string tree_root: true EventWaveform: name: EventWaveform @@ -229,8 +246,8 @@ classes: during experiment acquisition. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -252,8 +269,8 @@ classes: the ElectricalSeries 'filtering' attribute. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -267,8 +284,8 @@ classes: properties should be noted in the ElectricalSeries 'filtering' attribute. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -300,6 +317,17 @@ classes: range: ElectrodeGroup__position required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ElectrodeGroup__position: name: ElectrodeGroup__position @@ -374,6 +402,17 @@ classes: range: float32 required: true multivalued: false + clustering_interface: + name: clustering_interface + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Clustering + - range: string tree_root: true Clustering: name: Clustering diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml index 8c2e102..3764b00 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.epoch.yaml @@ -57,6 +57,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for tags. range: VectorIndex required: false @@ -67,6 +70,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: An index into a TimeSeries object. 
range: TimeSeriesReferenceVectorData required: false @@ -77,6 +83,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for timeseries. range: VectorIndex required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml index 494e7ae..eb94ee7 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml @@ -206,14 +206,9 @@ classes: having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals + range: NWBFile__intervals + required: false + multivalued: false units: name: units description: Data about sorted spike units. @@ -694,6 +689,44 @@ classes: range: ExperimentalConditionsTable required: false multivalued: false + NWBFile__intervals: + name: NWBFile__intervals + description: Experimental intervals, whether that be logically distinct sub-experiments + having a particular scientific goal, trials (see trials subgroup) during an + experiment, or epochs (see epochs subgroup) deriving from analysis of data. + attributes: + name: + name: name + ifabsent: string(intervals) + range: string + required: true + equals_string: intervals + epochs: + name: epochs + description: Divisions in time marking experimental stages or sub-divisions + of a single recording session. + range: TimeIntervals + required: false + multivalued: false + trials: + name: trials + description: Repeated experimental events that have a logical grouping. + range: TimeIntervals + required: false + multivalued: false + invalid_times: + name: invalid_times + description: Time intervals that should be removed from analysis. + range: TimeIntervals + required: false + multivalued: false + time_intervals: + name: time_intervals + description: Optional additional table(s) for describing other experimental + time intervals. + range: TimeIntervals + required: false + multivalued: true LabMetaData: name: LabMetaData description: Lab-specific meta-data. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml index dbde975..de10ce5 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml @@ -46,6 +46,17 @@ classes: range: float32 required: false multivalued: false + electrode: + name: electrode + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: IntracellularElectrode + - range: string tree_root: true PatchClampSeries__data: name: PatchClampSeries__data @@ -63,8 +74,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. 
range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -507,6 +518,17 @@ classes: range: text required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true SweepTable: name: SweepTable @@ -541,6 +563,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for series. range: VectorIndex required: true @@ -585,6 +610,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Column storing the reference to the recorded stimulus for the recording (rows). range: TimeSeriesReferenceVectorData @@ -610,6 +638,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Column storing the reference to the recorded response for the recording (rows) range: TimeSeriesReferenceVectorData @@ -687,6 +718,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the recordings column. range: VectorIndex required: true @@ -737,6 +771,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the simultaneous_recordings column. range: VectorIndex required: true @@ -796,6 +833,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the sequential_recordings column. range: VectorIndex required: true @@ -843,6 +883,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the repetitions column. range: VectorIndex required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.image.yaml index 169e913..c8a9988 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.image.yaml @@ -105,6 +105,17 @@ classes: range: text required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ImageSeries__external_file: name: ImageSeries__external_file @@ -134,8 +145,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. 
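The `array` to `value` rename above also applies to typed dataset classes such as `PatchClampSeries__data`, whose payload is a 1-D array over `num_times`. A sketch of the resulting field, assuming numpydantic `NDArray`/`Shape` as in the earlier array example; the class name and element dtype are illustrative:

```python
from typing import Optional
from numpydantic import NDArray, Shape
from pydantic import BaseModel

class PatchClampSeriesData(BaseModel):
    name: str = "data"
    unit: str  # base unit of measurement; raw values need 'conversion'/'offset'
    # the dataset body now lives under ``value`` rather than ``array``
    value: Optional[NDArray[Shape["* num_times"], float]] = None
```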
range: int32 - array: - name: array + value: + name: value array: dimensions: - alias: num_files @@ -153,6 +164,17 @@ classes: name: name range: string required: true + masked_imageseries: + name: masked_imageseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true OpticalSeries: name: OpticalSeries @@ -240,4 +262,26 @@ classes: range: uint32 required: true multivalued: false + indexed_timeseries: + name: indexed_timeseries + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: ImageSeries + - range: string + indexed_images: + name: indexed_images + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: Images + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.misc.yaml index 18d7b69..e7204ff 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.misc.yaml @@ -73,8 +73,8 @@ classes: the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -158,6 +158,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the channels that this decomposition series was generated from. range: DynamicTableRegion @@ -170,6 +173,17 @@ classes: range: DecompositionSeries__bands required: true multivalued: false + source_timeseries: + name: source_timeseries + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: TimeSeries + - range: string tree_root: true DecompositionSeries__data: name: DecompositionSeries__data @@ -187,8 +201,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -263,6 +277,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the spike_times dataset. range: VectorIndex required: false @@ -279,6 +296,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the obs_intervals dataset. range: VectorIndex required: false @@ -300,6 +320,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into electrodes. range: VectorIndex required: false @@ -310,6 +333,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Electrode that each spike unit came from, specified using a DynamicTableRegion. range: DynamicTableRegion required: false @@ -390,6 +416,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. 
range: VectorIndex @@ -401,6 +430,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. range: VectorIndex diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml index 25902d8..adadc3e 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ogen.yaml @@ -32,6 +32,17 @@ classes: range: numeric required: true multivalued: false + site: + name: site + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: OptogeneticStimulusSite + - range: string tree_root: true OptogeneticStimulusSite: name: OptogeneticStimulusSite @@ -62,4 +73,15 @@ classes: range: text required: true multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml index ebaa877..80df95c 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml @@ -50,6 +50,17 @@ classes: dimensions: - alias: width_height_depth exact_cardinality: 3 + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true RoiResponseSeries: name: RoiResponseSeries @@ -81,6 +92,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. range: DynamicTableRegion @@ -94,8 +108,8 @@ classes: for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -109,8 +123,8 @@ classes: for ROIs and for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -128,8 +142,8 @@ classes: is required and ROI names should remain consistent between them. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -158,6 +172,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into pixel_mask. range: VectorIndex required: false @@ -176,6 +193,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into voxel_mask. 
range: VectorIndex required: false @@ -196,6 +216,17 @@ classes: inlined_as_list: false any_of: - range: ImageSeries + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true PlaneSegmentation__image_mask: name: PlaneSegmentation__image_mask @@ -282,14 +313,208 @@ classes: description: An imaging plane and its metadata. is_a: NWBContainer attributes: - children: - name: children + name: + name: name + range: string + required: true + description: + name: description + description: Description of the imaging plane. + range: text + required: false + multivalued: false + excitation_lambda: + name: excitation_lambda + description: Excitation wavelength, in nm. + range: float32 + required: true + multivalued: false + imaging_rate: + name: imaging_rate + description: Rate that images are acquired, in Hz. If the corresponding TimeSeries + is present, the rate should be stored there instead. + range: float32 + required: false + multivalued: false + indicator: + name: indicator + description: Calcium indicator. + range: text + required: true + multivalued: false + location: + name: location + description: Location of the imaging plane. Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use + standard atlas names for anatomical regions when possible. + range: text + required: true + multivalued: false + manifold: + name: manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents + the position of the pixel relative to the defined coordinate space. Deprecated + in favor of origin_coords and grid_spacing. + range: ImagingPlane__manifold + required: false + multivalued: false + origin_coords: + name: origin_coords + description: Physical location of the first element of the imaging plane (0, + 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for + what the physical location is relative to (e.g., bregma). + range: ImagingPlane__origin_coords + required: false + multivalued: false + grid_spacing: + name: grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. See also + reference_frame to interpret the grid. + range: ImagingPlane__grid_spacing + required: false + multivalued: false + reference_frame: + name: reference_frame + description: Describes reference frame of origin_coords and grid_spacing. + For example, this can be a text description of the anatomical location and + orientation of the grid defined by origin_coords and grid_spacing or the + vectors needed to transform or rotate the grid to a common anatomical axis + (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and + grid_spacing. If origin_coords and grid_spacing are not present, then this + field is not required. For example, if the microscope takes 10 x 10 x 2 + images, where the first value of the data matrix (index (0, 0, 0)) corresponds + to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is + 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means + more anterior, larger numbers in y means more rightward, and larger numbers + in z means more ventral, then enter the following -- origin_coords = (-1.2, + -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates + are relative to bregma. 
First dimension corresponds to anterior-posterior + axis (larger index = more anterior). Second dimension corresponds to medial-lateral + axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral + axis (larger index = more ventral)." + range: text + required: false + multivalued: false + optical_channel: + name: optical_channel + description: An optical channel used to record from an imaging plane. + range: OpticalChannel + required: true multivalued: true - inlined: true - inlined_as_list: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false any_of: - - range: OpticalChannel + - range: Device + - range: string tree_root: true + ImagingPlane__manifold: + name: ImagingPlane__manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents the + position of the pixel relative to the defined coordinate space. Deprecated in + favor of origin_coords and grid_spacing. + attributes: + name: + name: name + ifabsent: string(manifold) + range: string + required: true + equals_string: manifold + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as pixels from + x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then + the 'conversion' multiplier to get from raw data acquisition pixel units + to meters is 2/1000. + range: float32 + unit: + name: unit + description: Base unit of measurement for working with the data. The default + value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: height + - alias: width + - alias: x_y_z + exact_cardinality: 3 + - array: + dimensions: + - alias: height + - alias: width + - alias: depth + - alias: x_y_z + exact_cardinality: 3 + ImagingPlane__origin_coords: + name: ImagingPlane__origin_coords + description: Physical location of the first element of the imaging plane (0, 0) + for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the + physical location is relative to (e.g., bregma). + attributes: + name: + name: name + ifabsent: string(origin_coords) + range: string + required: true + equals_string: origin_coords + unit: + name: unit + description: Measurement units for origin_coords. The default value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: x_y + exact_cardinality: 2 + - array: + dimensions: + - alias: x_y_z + exact_cardinality: 3 + ImagingPlane__grid_spacing: + name: ImagingPlane__grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame + to interpret the grid. + attributes: + name: + name: name + ifabsent: string(grid_spacing) + range: string + required: true + equals_string: grid_spacing + unit: + name: unit + description: Measurement units for grid_spacing. The default value is 'meters'. 
+ range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: x_y + exact_cardinality: 2 + - array: + dimensions: + - alias: x_y_z + exact_cardinality: 3 OpticalChannel: name: OpticalChannel description: An optical channel used to record from an imaging plane. @@ -319,8 +544,8 @@ classes: frame at each point in time is assumed to be 2-D (has only x & y dimensions).' is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -349,4 +574,15 @@ classes: range: TimeSeries required: true multivalued: false + original: + name: original + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml index 23a305e..31a6dd3 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml @@ -114,8 +114,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -145,8 +145,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -175,8 +175,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -206,8 +206,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -246,8 +246,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -273,8 +273,8 @@ classes: name: field_of_view description: Size of viewing area, in meters. range: float32 - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -309,8 +309,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml index df86c7a..6439b29 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml @@ -77,8 +77,8 @@ classes: name: description description: Description of the image. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -107,8 +107,12 @@ classes: name: name range: string required: true - image: - name: image + value: + name: value + annotations: + source_type: + tag: source_type + value: reference description: Ordered dataset of references to Image objects. range: Image required: true @@ -273,8 +277,8 @@ classes: the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. 
range: text - array: - name: array + value: + name: value range: AnyType any_of: - array: @@ -339,8 +343,8 @@ classes: description: A collection of processed data. is_a: NWBContainer attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -376,6 +380,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml index 839f43b..9174b6a 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml @@ -63,8 +63,8 @@ classes: units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -101,8 +101,8 @@ classes: events. BehavioralTimeSeries is for continuous data. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -115,8 +115,8 @@ classes: for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -129,8 +129,8 @@ classes: of BehavioralEpochs for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -142,8 +142,8 @@ classes: description: Eye-tracking data, representing pupil size. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -155,8 +155,8 @@ classes: description: Eye-tracking data, representing direction of gaze. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -172,8 +172,8 @@ classes: be radians or degrees. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -185,8 +185,8 @@ classes: description: Position data, whether along the x, x/y or x/y/z axis. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml index 7c95205..7145c03 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml @@ -62,6 +62,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. 
range: DynamicTableRegion @@ -177,6 +180,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -221,6 +227,17 @@ classes: range: float64 required: true multivalued: false + source_electricalseries: + name: source_electricalseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ElectricalSeries + - range: string tree_root: true EventWaveform: name: EventWaveform @@ -229,8 +246,8 @@ classes: during experiment acquisition. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -252,8 +269,8 @@ classes: the ElectricalSeries 'filtering' attribute. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -267,8 +284,8 @@ classes: properties should be noted in the ElectricalSeries 'filtering' attribute. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -300,6 +317,17 @@ classes: range: ElectrodeGroup__position required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ElectrodeGroup__position: name: ElectrodeGroup__position @@ -374,6 +402,17 @@ classes: range: float32 required: true multivalued: false + clustering_interface: + name: clustering_interface + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Clustering + - range: string tree_root: true Clustering: name: Clustering diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml index 7e9c4fc..fb0df61 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.epoch.yaml @@ -57,6 +57,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for tags. range: VectorIndex required: false @@ -67,6 +70,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: An index into a TimeSeries object. range: TimeSeriesReferenceVectorData required: false @@ -77,6 +83,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for timeseries. range: VectorIndex required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml index 6e7a701..3ddb452 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml @@ -206,14 +206,9 @@ classes: having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. 
- multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals + range: NWBFile__intervals + required: false + multivalued: false units: name: units description: Data about sorted spike units. @@ -694,6 +689,44 @@ classes: range: ExperimentalConditionsTable required: false multivalued: false + NWBFile__intervals: + name: NWBFile__intervals + description: Experimental intervals, whether that be logically distinct sub-experiments + having a particular scientific goal, trials (see trials subgroup) during an + experiment, or epochs (see epochs subgroup) deriving from analysis of data. + attributes: + name: + name: name + ifabsent: string(intervals) + range: string + required: true + equals_string: intervals + epochs: + name: epochs + description: Divisions in time marking experimental stages or sub-divisions + of a single recording session. + range: TimeIntervals + required: false + multivalued: false + trials: + name: trials + description: Repeated experimental events that have a logical grouping. + range: TimeIntervals + required: false + multivalued: false + invalid_times: + name: invalid_times + description: Time intervals that should be removed from analysis. + range: TimeIntervals + required: false + multivalued: false + time_intervals: + name: time_intervals + description: Optional additional table(s) for describing other experimental + time intervals. + range: TimeIntervals + required: false + multivalued: true LabMetaData: name: LabMetaData description: Lab-specific meta-data. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml index bb47c9e..c24417f 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml @@ -46,6 +46,17 @@ classes: range: float32 required: false multivalued: false + electrode: + name: electrode + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: IntracellularElectrode + - range: string tree_root: true PatchClampSeries__data: name: PatchClampSeries__data @@ -63,8 +74,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -507,6 +518,17 @@ classes: range: text required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true SweepTable: name: SweepTable @@ -541,6 +563,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for series. range: VectorIndex required: true @@ -585,6 +610,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Column storing the reference to the recorded stimulus for the recording (rows). 
range: TimeSeriesReferenceVectorData @@ -610,6 +638,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Column storing the reference to the recorded response for the recording (rows) range: TimeSeriesReferenceVectorData @@ -687,6 +718,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the recordings column. range: VectorIndex required: true @@ -737,6 +771,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the simultaneous_recordings column. range: VectorIndex required: true @@ -796,6 +833,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the sequential_recordings column. range: VectorIndex required: true @@ -843,6 +883,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the repetitions column. range: VectorIndex required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml index 114ea51..4d3fcd0 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml @@ -105,6 +105,17 @@ classes: range: text required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ImageSeries__external_file: name: ImageSeries__external_file @@ -134,8 +145,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 - array: - name: array + value: + name: value array: dimensions: - alias: num_files @@ -153,6 +164,17 @@ classes: name: name range: string required: true + masked_imageseries: + name: masked_imageseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true OpticalSeries: name: OpticalSeries @@ -240,4 +262,26 @@ classes: range: uint32 required: true multivalued: false + indexed_timeseries: + name: indexed_timeseries + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: ImageSeries + - range: string + indexed_images: + name: indexed_images + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: Images + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml index d47eba8..07c142e 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml @@ -73,8 +73,8 @@ classes: the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". 
range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -158,6 +158,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the channels that this decomposition series was generated from. range: DynamicTableRegion @@ -170,6 +173,17 @@ classes: range: DecompositionSeries__bands required: true multivalued: false + source_timeseries: + name: source_timeseries + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: TimeSeries + - range: string tree_root: true DecompositionSeries__data: name: DecompositionSeries__data @@ -187,8 +201,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -263,6 +277,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the spike_times dataset. range: VectorIndex required: false @@ -279,6 +296,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the obs_intervals dataset. range: VectorIndex required: false @@ -300,6 +320,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into electrodes. range: VectorIndex required: false @@ -310,6 +333,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Electrode that each spike unit came from, specified using a DynamicTableRegion. range: DynamicTableRegion required: false @@ -390,6 +416,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. range: VectorIndex @@ -401,6 +430,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. 
range: VectorIndex diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml index 9d01859..93ab4af 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ogen.yaml @@ -32,6 +32,17 @@ classes: range: numeric required: true multivalued: false + site: + name: site + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: OptogeneticStimulusSite + - range: string tree_root: true OptogeneticStimulusSite: name: OptogeneticStimulusSite @@ -62,4 +73,15 @@ classes: range: text required: true multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml index f1d6bc1..877b532 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml @@ -51,6 +51,17 @@ classes: name: intensity description: Intensity of the excitation in mW/mm^2, if known. range: float32 + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true TwoPhotonSeries: name: TwoPhotonSeries @@ -86,6 +97,17 @@ classes: dimensions: - alias: width_height_depth exact_cardinality: 3 + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true RoiResponseSeries: name: RoiResponseSeries @@ -117,6 +139,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. range: DynamicTableRegion @@ -130,8 +155,8 @@ classes: for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -145,8 +170,8 @@ classes: for ROIs and for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -164,8 +189,8 @@ classes: is required and ROI names should remain consistent between them. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -194,6 +219,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into pixel_mask. range: VectorIndex required: false @@ -212,6 +240,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into voxel_mask. 
range: VectorIndex required: false @@ -232,6 +263,17 @@ classes: inlined_as_list: false any_of: - range: ImageSeries + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true PlaneSegmentation__image_mask: name: PlaneSegmentation__image_mask @@ -318,14 +360,208 @@ classes: description: An imaging plane and its metadata. is_a: NWBContainer attributes: - children: - name: children + name: + name: name + range: string + required: true + description: + name: description + description: Description of the imaging plane. + range: text + required: false + multivalued: false + excitation_lambda: + name: excitation_lambda + description: Excitation wavelength, in nm. + range: float32 + required: true + multivalued: false + imaging_rate: + name: imaging_rate + description: Rate that images are acquired, in Hz. If the corresponding TimeSeries + is present, the rate should be stored there instead. + range: float32 + required: false + multivalued: false + indicator: + name: indicator + description: Calcium indicator. + range: text + required: true + multivalued: false + location: + name: location + description: Location of the imaging plane. Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use + standard atlas names for anatomical regions when possible. + range: text + required: true + multivalued: false + manifold: + name: manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents + the position of the pixel relative to the defined coordinate space. Deprecated + in favor of origin_coords and grid_spacing. + range: ImagingPlane__manifold + required: false + multivalued: false + origin_coords: + name: origin_coords + description: Physical location of the first element of the imaging plane (0, + 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for + what the physical location is relative to (e.g., bregma). + range: ImagingPlane__origin_coords + required: false + multivalued: false + grid_spacing: + name: grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. See also + reference_frame to interpret the grid. + range: ImagingPlane__grid_spacing + required: false + multivalued: false + reference_frame: + name: reference_frame + description: Describes reference frame of origin_coords and grid_spacing. + For example, this can be a text description of the anatomical location and + orientation of the grid defined by origin_coords and grid_spacing or the + vectors needed to transform or rotate the grid to a common anatomical axis + (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and + grid_spacing. If origin_coords and grid_spacing are not present, then this + field is not required. For example, if the microscope takes 10 x 10 x 2 + images, where the first value of the data matrix (index (0, 0, 0)) corresponds + to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is + 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means + more anterior, larger numbers in y means more rightward, and larger numbers + in z means more ventral, then enter the following -- origin_coords = (-1.2, + -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates + are relative to bregma. 
First dimension corresponds to anterior-posterior + axis (larger index = more anterior). Second dimension corresponds to medial-lateral + axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral + axis (larger index = more ventral)." + range: text + required: false + multivalued: false + optical_channel: + name: optical_channel + description: An optical channel used to record from an imaging plane. + range: OpticalChannel + required: true multivalued: true - inlined: true - inlined_as_list: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false any_of: - - range: OpticalChannel + - range: Device + - range: string tree_root: true + ImagingPlane__manifold: + name: ImagingPlane__manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents the + position of the pixel relative to the defined coordinate space. Deprecated in + favor of origin_coords and grid_spacing. + attributes: + name: + name: name + ifabsent: string(manifold) + range: string + required: true + equals_string: manifold + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as pixels from + x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then + the 'conversion' multiplier to get from raw data acquisition pixel units + to meters is 2/1000. + range: float32 + unit: + name: unit + description: Base unit of measurement for working with the data. The default + value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: height + - alias: width + - alias: x_y_z + exact_cardinality: 3 + - array: + dimensions: + - alias: height + - alias: width + - alias: depth + - alias: x_y_z + exact_cardinality: 3 + ImagingPlane__origin_coords: + name: ImagingPlane__origin_coords + description: Physical location of the first element of the imaging plane (0, 0) + for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the + physical location is relative to (e.g., bregma). + attributes: + name: + name: name + ifabsent: string(origin_coords) + range: string + required: true + equals_string: origin_coords + unit: + name: unit + description: Measurement units for origin_coords. The default value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: x_y + exact_cardinality: 2 + - array: + dimensions: + - alias: x_y_z + exact_cardinality: 3 + ImagingPlane__grid_spacing: + name: ImagingPlane__grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame + to interpret the grid. + attributes: + name: + name: name + ifabsent: string(grid_spacing) + range: string + required: true + equals_string: grid_spacing + unit: + name: unit + description: Measurement units for grid_spacing. The default value is 'meters'. 
+ range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: x_y + exact_cardinality: 2 + - array: + dimensions: + - alias: x_y_z + exact_cardinality: 3 OpticalChannel: name: OpticalChannel description: An optical channel used to record from an imaging plane. @@ -355,8 +591,8 @@ classes: frame at each point in time is assumed to be 2-D (has only x & y dimensions).' is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -385,4 +621,15 @@ classes: range: TimeSeries required: true multivalued: false + original: + name: original + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml index 629bea8..e913c0d 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml @@ -114,8 +114,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -145,8 +145,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -175,8 +175,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -206,8 +206,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -246,8 +246,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -273,8 +273,8 @@ classes: name: field_of_view description: Size of viewing area, in meters. range: float32 - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -309,8 +309,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml index b21d698..0bd5bd0 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml @@ -77,8 +77,8 @@ classes: name: description description: Description of the image. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -107,8 +107,12 @@ classes: name: name range: string required: true - image: - name: image + value: + name: value + annotations: + source_type: + tag: source_type + value: reference description: Ordered dataset of references to Image objects. range: Image required: true @@ -273,8 +277,8 @@ classes: the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. 
range: text - array: - name: array + value: + name: value range: AnyType any_of: - array: @@ -339,8 +343,8 @@ classes: description: A collection of processed data. is_a: NWBContainer attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -376,6 +380,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml index 91d2841..eabccf6 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml @@ -63,8 +63,8 @@ classes: units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -101,8 +101,8 @@ classes: events. BehavioralTimeSeries is for continuous data. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -115,8 +115,8 @@ classes: for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -129,8 +129,8 @@ classes: of BehavioralEpochs for more details. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -142,8 +142,8 @@ classes: description: Eye-tracking data, representing pupil size. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -155,8 +155,8 @@ classes: description: Eye-tracking data, representing direction of gaze. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -172,8 +172,8 @@ classes: be radians or degrees. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -185,8 +185,8 @@ classes: description: Position data, whether along the x, x/y or x/y/z axis. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml index f9525e7..378f644 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml @@ -62,6 +62,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. range: DynamicTableRegion @@ -177,6 +180,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the electrodes that this time series was generated from. 
range: DynamicTableRegion @@ -221,6 +227,17 @@ classes: range: float64 required: true multivalued: false + source_electricalseries: + name: source_electricalseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ElectricalSeries + - range: string tree_root: true EventWaveform: name: EventWaveform @@ -229,8 +246,8 @@ classes: during experiment acquisition. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -252,8 +269,8 @@ classes: the ElectricalSeries 'filtering' attribute. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -267,8 +284,8 @@ classes: properties should be noted in the ElectricalSeries 'filtering' attribute. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -300,6 +317,17 @@ classes: range: ElectrodeGroup__position required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ElectrodeGroup__position: name: ElectrodeGroup__position @@ -374,6 +402,17 @@ classes: range: float32 required: true multivalued: false + clustering_interface: + name: clustering_interface + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Clustering + - range: string tree_root: true Clustering: name: Clustering diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml index b4586cd..1885024 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml @@ -57,6 +57,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for tags. range: VectorIndex required: false @@ -67,6 +70,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: An index into a TimeSeries object. range: TimeSeriesReferenceVectorData required: false @@ -77,6 +83,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for timeseries. range: VectorIndex required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml index f0c47ab..a0978ba 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml @@ -206,14 +206,9 @@ classes: having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data. - multivalued: true - inlined: true - inlined_as_list: false - any_of: - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals - - range: TimeIntervals + range: NWBFile__intervals + required: false + multivalued: false units: name: units description: Data about sorted spike units. 
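The hunk above swaps the `intervals` slot's inlined `any_of` list (four identical `TimeIntervals` ranges) for a reference to a new `NWBFile__intervals` class; the hunk below defines that class. As a rough sketch of what the class implies for the generated Pydantic models (illustrative only — the real generator output may differ, and `TimeIntervals` is stubbed here):

```python
from typing import List, Literal, Optional

from pydantic import BaseModel


class TimeIntervals(BaseModel):
    """Stub standing in for the generated TimeIntervals model."""

    name: str


class NWBFileIntervals(BaseModel):
    """Sketch of the NWBFile__intervals class defined in the hunk below."""

    # ifabsent: string(intervals) / equals_string: intervals
    name: Literal["intervals"] = "intervals"
    # epochs, trials, invalid_times: required: false, multivalued: false
    epochs: Optional[TimeIntervals] = None
    trials: Optional[TimeIntervals] = None
    invalid_times: Optional[TimeIntervals] = None
    # time_intervals: required: false, multivalued: true
    time_intervals: Optional[List[TimeIntervals]] = None
```

Compared with the removed `any_of` form, each interval table now lands on a named, typed slot, so an access like `nwbfile.intervals.trials` resolves statically instead of requiring a scan of an untyped children list.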
@@ -696,6 +691,44 @@ classes: range: ExperimentalConditionsTable required: false multivalued: false + NWBFile__intervals: + name: NWBFile__intervals + description: Experimental intervals, whether that be logically distinct sub-experiments + having a particular scientific goal, trials (see trials subgroup) during an + experiment, or epochs (see epochs subgroup) deriving from analysis of data. + attributes: + name: + name: name + ifabsent: string(intervals) + range: string + required: true + equals_string: intervals + epochs: + name: epochs + description: Divisions in time marking experimental stages or sub-divisions + of a single recording session. + range: TimeIntervals + required: false + multivalued: false + trials: + name: trials + description: Repeated experimental events that have a logical grouping. + range: TimeIntervals + required: false + multivalued: false + invalid_times: + name: invalid_times + description: Time intervals that should be removed from analysis. + range: TimeIntervals + required: false + multivalued: false + time_intervals: + name: time_intervals + description: Optional additional table(s) for describing other experimental + time intervals. + range: TimeIntervals + required: false + multivalued: true LabMetaData: name: LabMetaData description: Lab-specific meta-data. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml index fffe23a..3ad625d 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml @@ -46,6 +46,17 @@ classes: range: float32 required: false multivalued: false + electrode: + name: electrode + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: IntracellularElectrode + - range: string tree_root: true PatchClampSeries__data: name: PatchClampSeries__data @@ -63,8 +74,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -507,6 +518,17 @@ classes: range: text required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true SweepTable: name: SweepTable @@ -541,6 +563,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index for series. range: VectorIndex required: true @@ -585,6 +610,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Column storing the reference to the recorded stimulus for the recording (rows). range: TimeSeriesReferenceVectorData @@ -596,6 +624,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Column storing the reference to the stimulus template for the recording (rows). 
range: TimeSeriesReferenceVectorData @@ -621,6 +652,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Column storing the reference to the recorded response for the recording (rows) range: TimeSeriesReferenceVectorData @@ -698,6 +732,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the recordings column. range: VectorIndex required: true @@ -748,6 +785,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the simultaneous_recordings column. range: VectorIndex required: true @@ -807,6 +847,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the sequential_recordings column. range: VectorIndex required: true @@ -854,6 +897,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index dataset for the repetitions column. range: VectorIndex required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml index 645839e..b87f670 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml @@ -105,6 +105,17 @@ classes: range: text required: false multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: Device + - range: string tree_root: true ImageSeries__external_file: name: ImageSeries__external_file @@ -134,8 +145,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 - array: - name: array + value: + name: value array: dimensions: - alias: num_files @@ -153,6 +164,17 @@ classes: name: name range: string required: true + masked_imageseries: + name: masked_imageseries + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true OpticalSeries: name: OpticalSeries @@ -240,4 +262,26 @@ classes: range: uint32 required: true multivalued: false + indexed_timeseries: + name: indexed_timeseries + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: ImageSeries + - range: string + indexed_images: + name: indexed_images + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: Images + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml index 0b95ca2..bedbbeb 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml @@ -73,8 +73,8 @@ classes: the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". 
range: text - array: - name: array + value: + name: value range: numeric any_of: - array: @@ -158,6 +158,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion pointer to the channels that this decomposition series was generated from. range: DynamicTableRegion @@ -170,6 +173,17 @@ classes: range: DecompositionSeries__bands required: true multivalued: false + source_timeseries: + name: source_timeseries + annotations: + source_type: + tag: source_type + value: link + required: false + multivalued: false + any_of: + - range: TimeSeries + - range: string tree_root: true DecompositionSeries__data: name: DecompositionSeries__data @@ -187,8 +201,8 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_times @@ -263,6 +277,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the spike_times dataset. range: VectorIndex required: false @@ -279,6 +296,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the obs_intervals dataset. range: VectorIndex required: false @@ -300,6 +320,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into electrodes. range: VectorIndex required: false @@ -310,6 +333,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Electrode that each spike unit came from, specified using a DynamicTableRegion. range: DynamicTableRegion required: false @@ -390,6 +416,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. range: VectorIndex @@ -401,6 +430,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. 
range: VectorIndex diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml index 5c6344e..085004d 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml @@ -39,6 +39,17 @@ classes: dimensions: - alias: num_times - alias: num_rois + site: + name: site + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: OptogeneticStimulusSite + - range: string tree_root: true OptogeneticStimulusSite: name: OptogeneticStimulusSite @@ -69,4 +80,15 @@ classes: range: text required: true multivalued: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: Device + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml index b3004c8..2e5036c 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml @@ -51,6 +51,17 @@ classes: name: intensity description: Intensity of the excitation in mW/mm^2, if known. range: float32 + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true TwoPhotonSeries: name: TwoPhotonSeries @@ -86,6 +97,17 @@ classes: dimensions: - alias: width_height_depth exact_cardinality: 3 + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true RoiResponseSeries: name: RoiResponseSeries @@ -117,6 +139,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. range: DynamicTableRegion @@ -130,8 +155,8 @@ classes: for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -145,8 +170,8 @@ classes: for ROIs and for image planes). is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -164,8 +189,8 @@ classes: is required and ROI names should remain consistent between them. is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -194,6 +219,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into pixel_mask. range: VectorIndex required: false @@ -212,6 +240,9 @@ classes: named: tag: named value: true + source_type: + tag: source_type + value: neurodata_type_inc description: Index into voxel_mask. 
range: VectorIndex required: false @@ -232,6 +263,17 @@ classes: inlined_as_list: false any_of: - range: ImageSeries + imaging_plane: + name: imaging_plane + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImagingPlane + - range: string tree_root: true PlaneSegmentation__image_mask: name: PlaneSegmentation__image_mask @@ -318,14 +360,208 @@ classes: description: An imaging plane and its metadata. is_a: NWBContainer attributes: - children: - name: children + name: + name: name + range: string + required: true + description: + name: description + description: Description of the imaging plane. + range: text + required: false + multivalued: false + excitation_lambda: + name: excitation_lambda + description: Excitation wavelength, in nm. + range: float32 + required: true + multivalued: false + imaging_rate: + name: imaging_rate + description: Rate that images are acquired, in Hz. If the corresponding TimeSeries + is present, the rate should be stored there instead. + range: float32 + required: false + multivalued: false + indicator: + name: indicator + description: Calcium indicator. + range: text + required: true + multivalued: false + location: + name: location + description: Location of the imaging plane. Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use + standard atlas names for anatomical regions when possible. + range: text + required: true + multivalued: false + manifold: + name: manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents + the position of the pixel relative to the defined coordinate space. Deprecated + in favor of origin_coords and grid_spacing. + range: ImagingPlane__manifold + required: false + multivalued: false + origin_coords: + name: origin_coords + description: Physical location of the first element of the imaging plane (0, + 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for + what the physical location is relative to (e.g., bregma). + range: ImagingPlane__origin_coords + required: false + multivalued: false + grid_spacing: + name: grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. See also + reference_frame to interpret the grid. + range: ImagingPlane__grid_spacing + required: false + multivalued: false + reference_frame: + name: reference_frame + description: Describes reference frame of origin_coords and grid_spacing. + For example, this can be a text description of the anatomical location and + orientation of the grid defined by origin_coords and grid_spacing or the + vectors needed to transform or rotate the grid to a common anatomical axis + (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and + grid_spacing. If origin_coords and grid_spacing are not present, then this + field is not required. For example, if the microscope takes 10 x 10 x 2 + images, where the first value of the data matrix (index (0, 0, 0)) corresponds + to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is + 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means + more anterior, larger numbers in y means more rightward, and larger numbers + in z means more ventral, then enter the following -- origin_coords = (-1.2, + -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates + are relative to bregma. 
First dimension corresponds to anterior-posterior + axis (larger index = more anterior). Second dimension corresponds to medial-lateral + axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral + axis (larger index = more ventral)." + range: text + required: false + multivalued: false + optical_channel: + name: optical_channel + description: An optical channel used to record from an imaging plane. + range: OpticalChannel + required: true multivalued: true - inlined: true - inlined_as_list: false + device: + name: device + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false any_of: - - range: OpticalChannel + - range: Device + - range: string tree_root: true + ImagingPlane__manifold: + name: ImagingPlane__manifold + description: DEPRECATED Physical position of each pixel. 'xyz' represents the + position of the pixel relative to the defined coordinate space. Deprecated in + favor of origin_coords and grid_spacing. + attributes: + name: + name: name + ifabsent: string(manifold) + range: string + required: true + equals_string: manifold + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as pixels from + x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then + the 'conversion' multiplier to get from raw data acquisition pixel units + to meters is 2/1000. + range: float32 + unit: + name: unit + description: Base unit of measurement for working with the data. The default + value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: height + - alias: width + - alias: x_y_z + exact_cardinality: 3 + - array: + dimensions: + - alias: height + - alias: width + - alias: depth + - alias: x_y_z + exact_cardinality: 3 + ImagingPlane__origin_coords: + name: ImagingPlane__origin_coords + description: Physical location of the first element of the imaging plane (0, 0) + for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the + physical location is relative to (e.g., bregma). + attributes: + name: + name: name + ifabsent: string(origin_coords) + range: string + required: true + equals_string: origin_coords + unit: + name: unit + description: Measurement units for origin_coords. The default value is 'meters'. + range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: x_y + exact_cardinality: 2 + - array: + dimensions: + - alias: x_y_z + exact_cardinality: 3 + ImagingPlane__grid_spacing: + name: ImagingPlane__grid_spacing + description: Space between pixels in (x, y) or voxels in (x, y, z) directions, + in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame + to interpret the grid. + attributes: + name: + name: name + ifabsent: string(grid_spacing) + range: string + required: true + equals_string: grid_spacing + unit: + name: unit + description: Measurement units for grid_spacing. The default value is 'meters'. 
+ range: text + value: + name: value + range: float32 + any_of: + - array: + dimensions: + - alias: x_y + exact_cardinality: 2 + - array: + dimensions: + - alias: x_y_z + exact_cardinality: 3 OpticalChannel: name: OpticalChannel description: An optical channel used to record from an imaging plane. @@ -355,8 +591,8 @@ classes: frame at each point in time is assumed to be 2-D (has only x & y dimensions).' is_a: NWBDataInterface attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false @@ -385,4 +621,15 @@ classes: range: TimeSeries required: true multivalued: false + original: + name: original + annotations: + source_type: + tag: source_type + value: link + required: true + multivalued: false + any_of: + - range: ImageSeries + - range: string tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml index 4bcc17a..f3173d3 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml @@ -114,8 +114,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -145,8 +145,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -175,8 +175,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -206,8 +206,8 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -246,8 +246,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -273,8 +273,8 @@ classes: name: field_of_view description: Size of viewing area, in meters. range: float32 - array: - name: array + value: + name: value array: dimensions: - alias: num_rows @@ -309,8 +309,8 @@ classes: name: format description: Format of image. Right now only 'raw' is supported. range: text - array: - name: array + value: + name: value array: dimensions: - alias: num_rows diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.table.yaml index 48a60ac..ec35619 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.table.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.table.yaml @@ -55,8 +55,8 @@ classes: name: description description: Description of what these vectors represent. range: text - array: - name: array + value: + name: value range: AnyType any_of: - array: @@ -93,8 +93,8 @@ classes: name: target description: Reference to the target dataset that this index applies to. 
range: VectorData - array: - name: array + value: + name: value array: dimensions: - alias: num_rows diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml index e57c52a..1244aae 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.base.yaml @@ -36,8 +36,8 @@ classes: description: A simple Container for holding onto multiple containers. is_a: Container attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml index cbaf95a..aaef099 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml @@ -33,8 +33,8 @@ classes: name: description description: Description of what these vectors represent. range: text - array: - name: array + value: + name: value range: AnyType any_of: - array: @@ -175,8 +175,8 @@ classes: by a separate DynamicTable stored within the group. is_a: DynamicTable attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml index 0365fab..b03629e 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml @@ -36,8 +36,8 @@ classes: description: A simple Container for holding onto multiple containers. is_a: Container attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml index ecd7b48..d3398c4 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml @@ -33,8 +33,8 @@ classes: name: description description: Description of what these vectors represent. range: text - array: - name: array + value: + name: value range: AnyType any_of: - array: @@ -175,8 +175,8 @@ classes: by a separate DynamicTable stored within the group. 
is_a: DynamicTable attributes: - children: - name: children + value: + name: value multivalued: true inlined: true inlined_as_list: false From 374bd8971db09e2d31ebd69676676c25907e6b54 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 5 Aug 2024 17:06:59 -0700 Subject: [PATCH 14/61] fix name array -> value in hdmf mixins --- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 18 +++++++++--------- scripts/generate_core.py | 1 - 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 10aa05f..acac675 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -187,21 +187,21 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None + value: Optional[NDArray] = None def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing return self._index[item] else: - return self.array[item] + return self.value[item] def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing self._index[key] = value else: - self.array[key] = value + self.value[key] = value class VectorIndexMixin(BaseModel): @@ -210,7 +210,7 @@ class VectorIndexMixin(BaseModel): """ # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None + value: Optional[NDArray] = None target: Optional["VectorData"] = None def _getitem_helper(self, arg: int) -> Union[list, NDArray]: @@ -218,18 +218,18 @@ class VectorIndexMixin(BaseModel): Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.array[arg - 1] - end = self.array[arg] + start = 0 if arg == 0 else self.value[arg - 1] + end = self.value[arg] return self.target.array[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: - return self.array[item] + return self.value[item] elif type(self.target).__name__ == "VectorData": if isinstance(item, int): return self._getitem_helper(item) else: - idx = range(*item.indices(len(self.array))) + idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: raise NotImplementedError("DynamicTableRange not supported yet") @@ -239,7 +239,7 @@ class VectorIndexMixin(BaseModel): # VectorIndex is the thing that knows how to do the slicing self._index[key] = value else: - self.array[key] = value + self.value[key] = value DYNAMIC_TABLE_IMPORTS = Imports( diff --git a/scripts/generate_core.py b/scripts/generate_core.py index 7f86171..826bf30 100644 --- a/scripts/generate_core.py +++ b/scripts/generate_core.py @@ -1,4 +1,3 @@ -import pdb import shutil import os import traceback From 652ddb3b4846957f0894b68727e9e1a6bc763353 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 5 Aug 2024 17:09:58 -0700 Subject: [PATCH 15/61] update hdmf common table --- .../hdmf_common/v1_1_0/hdmf_common_table.py | 18 +++++++++--------- .../hdmf_common/v1_1_2/hdmf_common_table.py | 18 +++++++++--------- .../hdmf_common/v1_1_3/hdmf_common_table.py | 18 +++++++++--------- .../hdmf_common/v1_5_0/hdmf_common_table.py | 18 +++++++++--------- .../hdmf_common/v1_8_0/hdmf_common_table.py | 18 
+++++++++--------- 5 files changed, 45 insertions(+), 45 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index b212cef..93d1574 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -57,21 +57,21 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None + value: Optional[NDArray] = None def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing return self._index[item] else: - return self.array[item] + return self.value[item] def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing self._index[key] = value else: - self.array[key] = value + self.value[key] = value class VectorIndexMixin(BaseModel): @@ -80,7 +80,7 @@ class VectorIndexMixin(BaseModel): """ # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None + value: Optional[NDArray] = None target: Optional["VectorData"] = None def _getitem_helper(self, arg: int) -> Union[list, NDArray]: @@ -88,18 +88,18 @@ class VectorIndexMixin(BaseModel): Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.array[arg - 1] - end = self.array[arg] + start = 0 if arg == 0 else self.value[arg - 1] + end = self.value[arg] return self.target.array[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: - return self.array[item] + return self.value[item] elif type(self.target).__name__ == "VectorData": if isinstance(item, int): return self._getitem_helper(item) else: - idx = range(*item.indices(len(self.array))) + idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: raise NotImplementedError("DynamicTableRange not supported yet") @@ -109,7 +109,7 @@ class VectorIndexMixin(BaseModel): # VectorIndex is the thing that knows how to do the slicing self._index[key] = value else: - self.array[key] = value + self.value[key] = value class DynamicTableMixin(BaseModel): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 9d1bdb6..25748ee 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -57,21 +57,21 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None + value: Optional[NDArray] = None def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing return self._index[item] else: - return self.array[item] + return self.value[item] def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: if self._index: # Following hdmf, 
VectorIndex is the thing that knows how to do the slicing self._index[key] = value else: - self.array[key] = value + self.value[key] = value class VectorIndexMixin(BaseModel): @@ -80,7 +80,7 @@ class VectorIndexMixin(BaseModel): """ # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None + value: Optional[NDArray] = None target: Optional["VectorData"] = None def _getitem_helper(self, arg: int) -> Union[list, NDArray]: @@ -88,18 +88,18 @@ class VectorIndexMixin(BaseModel): Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.array[arg - 1] - end = self.array[arg] + start = 0 if arg == 0 else self.value[arg - 1] + end = self.value[arg] return self.target.array[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: - return self.array[item] + return self.value[item] elif type(self.target).__name__ == "VectorData": if isinstance(item, int): return self._getitem_helper(item) else: - idx = range(*item.indices(len(self.array))) + idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: raise NotImplementedError("DynamicTableRange not supported yet") @@ -109,7 +109,7 @@ class VectorIndexMixin(BaseModel): # VectorIndex is the thing that knows how to do the slicing self._index[key] = value else: - self.array[key] = value + self.value[key] = value class DynamicTableMixin(BaseModel): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index 7ca0724..3d4762c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -57,21 +57,21 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None + value: Optional[NDArray] = None def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing return self._index[item] else: - return self.array[item] + return self.value[item] def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing self._index[key] = value else: - self.array[key] = value + self.value[key] = value class VectorIndexMixin(BaseModel): @@ -80,7 +80,7 @@ class VectorIndexMixin(BaseModel): """ # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None + value: Optional[NDArray] = None target: Optional["VectorData"] = None def _getitem_helper(self, arg: int) -> Union[list, NDArray]: @@ -88,18 +88,18 @@ class VectorIndexMixin(BaseModel): Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.array[arg - 1] - end = self.array[arg] + start = 0 if arg == 0 else self.value[arg - 1] + end = self.value[arg] return self.target.array[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: - return self.array[item] + return self.value[item] elif type(self.target).__name__ == "VectorData": if isinstance(item, int): return self._getitem_helper(item) else: - idx = 
range(*item.indices(len(self.array))) + idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: raise NotImplementedError("DynamicTableRange not supported yet") @@ -109,7 +109,7 @@ class VectorIndexMixin(BaseModel): # VectorIndex is the thing that knows how to do the slicing self._index[key] = value else: - self.array[key] = value + self.value[key] = value class DynamicTableMixin(BaseModel): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 7df7183..61e5ba2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -58,21 +58,21 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None + value: Optional[NDArray] = None def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing return self._index[item] else: - return self.array[item] + return self.value[item] def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing self._index[key] = value else: - self.array[key] = value + self.value[key] = value class VectorIndexMixin(BaseModel): @@ -81,7 +81,7 @@ class VectorIndexMixin(BaseModel): """ # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None + value: Optional[NDArray] = None target: Optional["VectorData"] = None def _getitem_helper(self, arg: int) -> Union[list, NDArray]: @@ -89,18 +89,18 @@ class VectorIndexMixin(BaseModel): Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.array[arg - 1] - end = self.array[arg] + start = 0 if arg == 0 else self.value[arg - 1] + end = self.value[arg] return self.target.array[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: - return self.array[item] + return self.value[item] elif type(self.target).__name__ == "VectorData": if isinstance(item, int): return self._getitem_helper(item) else: - idx = range(*item.indices(len(self.array))) + idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: raise NotImplementedError("DynamicTableRange not supported yet") @@ -110,7 +110,7 @@ class VectorIndexMixin(BaseModel): # VectorIndex is the thing that knows how to do the slicing self._index[key] = value else: - self.array[key] = value + self.value[key] = value class DynamicTableMixin(BaseModel): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index e168269..e1e413c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -58,21 +58,21 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None + value: Optional[NDArray] = None def 
__getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing return self._index[item] else: - return self.array[item] + return self.value[item] def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing self._index[key] = value else: - self.array[key] = value + self.value[key] = value class VectorIndexMixin(BaseModel): @@ -81,7 +81,7 @@ class VectorIndexMixin(BaseModel): """ # redefined in `VectorData`, but included here for testing and type checking - array: Optional[NDArray] = None + value: Optional[NDArray] = None target: Optional["VectorData"] = None def _getitem_helper(self, arg: int) -> Union[list, NDArray]: @@ -89,18 +89,18 @@ class VectorIndexMixin(BaseModel): Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.array[arg - 1] - end = self.array[arg] + start = 0 if arg == 0 else self.value[arg - 1] + end = self.value[arg] return self.target.array[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: - return self.array[item] + return self.value[item] elif type(self.target).__name__ == "VectorData": if isinstance(item, int): return self._getitem_helper(item) else: - idx = range(*item.indices(len(self.array))) + idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: raise NotImplementedError("DynamicTableRange not supported yet") @@ -110,7 +110,7 @@ class VectorIndexMixin(BaseModel): # VectorIndex is the thing that knows how to do the slicing self._index[key] = value else: - self.array[key] = value + self.value[key] = value class DynamicTableMixin(BaseModel): From da6d0d860897ee17f832311004f8bb59cb001d13 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 5 Aug 2024 18:41:38 -0700 Subject: [PATCH 16/61] correctly handle attributes --- nwb_linkml/src/nwb_linkml/adapters/adapter.py | 35 +++- .../src/nwb_linkml/adapters/attribute.py | 194 ++++++++++++++++++ nwb_linkml/src/nwb_linkml/adapters/classes.py | 47 +---- nwb_linkml/src/nwb_linkml/adapters/dataset.py | 67 ++---- nwb_linkml/src/nwb_linkml/maps/dtype.py | 32 +++ scripts/generate_core.py | 11 +- 6 files changed, 290 insertions(+), 96 deletions(-) create mode 100644 nwb_linkml/src/nwb_linkml/adapters/attribute.py diff --git a/nwb_linkml/src/nwb_linkml/adapters/adapter.py b/nwb_linkml/src/nwb_linkml/adapters/adapter.py index 561df65..6823bb0 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/adapter.py +++ b/nwb_linkml/src/nwb_linkml/adapters/adapter.py @@ -26,7 +26,7 @@ from linkml_runtime.linkml_model import ( ) from pydantic import BaseModel -from nwb_schema_language import Attribute, Dataset, Group, Schema +from nwb_schema_language import Attribute, Dataset, Group, Schema, CompoundDtype if sys.version_info.minor >= 11: from typing import TypeVarTuple, Unpack @@ -238,3 +238,36 @@ class Adapter(BaseModel): for item in self.walk(input): if any([type(item) is atype for atype in get_type]): yield item + + +def is_1d(cls: Dataset | Attribute) -> bool: + """ + Check if the values of a dataset are 1-dimensional. 
+ + Specifically: + * a single-layer dim/shape list of length 1, or + * a nested dim/shape list where every nested spec is of length 1 + """ + return ( + not any([isinstance(dim, list) for dim in cls.dims]) and len(cls.dims) == 1 + ) or ( # nested list + all([isinstance(dim, list) for dim in cls.dims]) + and len(cls.dims) == 1 + and len(cls.dims[0]) == 1 + ) + + +def is_compound(cls: Dataset) -> bool: + """Check if dataset has a compound dtype""" + return ( + isinstance(cls.dtype, list) + and len(cls.dtype) > 0 + and isinstance(cls.dtype[0], CompoundDtype) + ) + + +def has_attrs(cls: Dataset) -> bool: + """ + Check if a dataset has any attributes at all without defaults + """ + return len(cls.attributes) > 0 and all([not a.value for a in cls.attributes]) diff --git a/nwb_linkml/src/nwb_linkml/adapters/attribute.py b/nwb_linkml/src/nwb_linkml/adapters/attribute.py new file mode 100644 index 0000000..c7ebd19 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/adapters/attribute.py @@ -0,0 +1,194 @@ +""" +Adapters for attribute types +""" + +from abc import abstractmethod +from typing import ClassVar, Optional, TypedDict, Type + +from linkml_runtime.linkml_model.meta import SlotDefinition + +from nwb_linkml.adapters.array import ArrayAdapter +from nwb_linkml.adapters.adapter import BuildResult, is_1d, Adapter +from nwb_linkml.maps import Map +from nwb_linkml.maps.dtype import handle_dtype +from nwb_schema_language import Attribute + + +def _make_ifabsent(val: str | int | float | None) -> str | None: + if val is None: + return None + elif isinstance(val, str): + return f"string({val})" + elif isinstance(val, int): + return f"integer({val})" + elif isinstance(val, float): + return f"float({val})" + else: + return str(val) + + +class AttrDefaults(TypedDict): + equals_string: str | None + equals_number: float | int | None + ifabsent: str | None + + +class AttributeMap(Map): + + @classmethod + def handle_defaults(cls, attr: Attribute) -> AttrDefaults: + """ + Construct arguments for linkml slot default metaslots from nwb schema lang attribute props + """ + equals_string = None + equals_number = None + default_value = None + if attr.value: + if isinstance(attr.value, (int, float)): + equals_number = attr.value + elif attr.value: + equals_string = str(attr.value) + + if equals_number: + default_value = _make_ifabsent(equals_number) + elif equals_string: + default_value = _make_ifabsent(equals_string) + elif attr.default_value: + default_value = _make_ifabsent(attr.default_value) + + return AttrDefaults( + equals_string=equals_string, equals_number=equals_number, ifabsent=default_value + ) + + @classmethod + @abstractmethod + def check(cls, attr: Attribute) -> bool: + """ + Check if this map applies + """ + pass # pragma: no cover + + @classmethod + @abstractmethod + def apply( + cls, attr: Attribute, res: Optional[BuildResult] = None, name: Optional[str] = None + ) -> BuildResult: + """ + Apply this mapping + """ + pass # pragma: no cover + + +class MapScalar(AttributeMap): + """ + Map a simple scalar value + """ + + @classmethod + def check(cls, attr: Attribute) -> bool: + """ + Check if we are a scalar value! + """ + return not attr.dims and not attr.shape + + @classmethod + def apply(cls, attr: Attribute, res: Optional[BuildResult] = None) -> BuildResult: + """ + Make a slot for us!
+ """ + slot = SlotDefinition( + name=attr.name, + range=handle_dtype(attr.dtype), + description=attr.doc, + required=attr.required, + **cls.handle_defaults(attr), + ) + return BuildResult(slots=[slot]) + + +class MapArray(AttributeMap): + """ + Map an array value! + """ + + @classmethod + def check(cls, attr: Attribute) -> bool: + """ + Check that we have some array specification! + """ + return bool(attr.dims) or attr.shape + + @classmethod + def apply(cls, attr: Attribute, res: Optional[BuildResult] = None) -> BuildResult: + """ + Make a slot with an array expression! + + If we're just a 1D array, use a list (set multivalued: true). + If more than that, make an array descriptor + """ + expressions = {} + multivalued = False + if is_1d(attr): + multivalued = True + else: + # --------------------------------- + # SPECIAL CASE: Some old versions of HDMF don't have ``dims``, only shape + # --------------------------------- + shape = attr.shape + dims = attr.dims + if shape and not dims: + dims = ["null"] * len(shape) + + array_adapter = ArrayAdapter(dims, shape) + expressions = array_adapter.make_slot() + + slot = SlotDefinition( + name=attr.name, + range=handle_dtype(attr.dtype), + multivalued=multivalued, + description=attr.doc, + required=attr.required, + **expressions, + **cls.handle_defaults(attr), + ) + return BuildResult(slots=[slot]) + + +class AttributeAdapter(Adapter): + """ + Create slot definitions from nwb schema language attributes + """ + + TYPE: ClassVar[Type] = Attribute + + cls: Attribute + + def build(self) -> "BuildResult": + """ + Build the slot definitions, every attribute should have a map. + """ + map = self.match() + return map.apply(self.cls) + + def match(self) -> Optional[Type[AttributeMap]]: + """ + Find the map class that applies to this attribute + + Returns: + :class:`.AttributeMap` + + Raises: + RuntimeError - if more than one map matches + """ + # find a map to use + matches = [m for m in AttributeMap.__subclasses__() if m.check(self.cls)] + + if len(matches) > 1: # pragma: no cover + raise RuntimeError( + "Only one map should apply to a dataset, you need to refactor the maps! 
Got maps:" + f" {matches}" + ) + elif len(matches) == 0: + return None + else: + return matches[0] diff --git a/nwb_linkml/src/nwb_linkml/adapters/classes.py b/nwb_linkml/src/nwb_linkml/adapters/classes.py index fcdf43c..054a401 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/classes.py +++ b/nwb_linkml/src/nwb_linkml/adapters/classes.py @@ -9,9 +9,10 @@ from linkml_runtime.linkml_model import ClassDefinition, SlotDefinition from pydantic import field_validator from nwb_linkml.adapters.adapter import Adapter, BuildResult +from nwb_linkml.adapters.attribute import AttributeAdapter from nwb_linkml.maps import QUANTITY_MAP from nwb_linkml.maps.naming import camel_to_snake -from nwb_schema_language import CompoundDtype, Dataset, DTypeType, FlatDtype, Group, ReferenceDtype +from nwb_schema_language import Dataset, Group T = TypeVar("T", bound=Type[Dataset] | Type[Group]) TI = TypeVar("TI", bound=Dataset | Group) @@ -118,16 +119,9 @@ class ClassAdapter(Adapter): Returns: list[:class:`.SlotDefinition`] """ - attrs = [ - SlotDefinition( - name=attr.name, - description=attr.doc, - range=self.handle_dtype(attr.dtype), - ) - for attr in cls.attributes - ] - - return attrs + results = [AttributeAdapter(cls=attr).build() for attr in cls.attributes] + slots = [r.slots[0] for r in results] + return slots def _get_full_name(self) -> str: """The full name of the object in the generated linkml @@ -205,37 +199,6 @@ class ClassAdapter(Adapter): return name - @classmethod - def handle_dtype(cls, dtype: DTypeType | None) -> str: - """ - Get the string form of a dtype - - Args: - dtype (:class:`.DTypeType`): Dtype to stringify - - Returns: - str - """ - if isinstance(dtype, ReferenceDtype): - return dtype.target_type - elif dtype is None or dtype == []: - # Some ill-defined datasets are "abstract" despite that not being in the schema language - return "AnyType" - elif isinstance(dtype, FlatDtype): - return dtype.value - elif isinstance(dtype, list) and isinstance(dtype[0], CompoundDtype): - # there is precisely one class that uses compound dtypes: - # TimeSeriesReferenceVectorData - # compoundDtypes are able to define a ragged table according to the schema - # but are used in this single case equivalently to attributes. - # so we'll... uh... treat them as slots. 
- # TODO - return "AnyType" - - else: - # flat dtype - return dtype - def build_name_slot(self) -> SlotDefinition: """ If a class has a name, then that name should be a slot with a diff --git a/nwb_linkml/src/nwb_linkml/adapters/dataset.py b/nwb_linkml/src/nwb_linkml/adapters/dataset.py index e9268cb..bcbd109 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/dataset.py +++ b/nwb_linkml/src/nwb_linkml/adapters/dataset.py @@ -7,13 +7,13 @@ from typing import ClassVar, Optional, Type from linkml_runtime.linkml_model.meta import ArrayExpression, SlotDefinition -from nwb_linkml.adapters.adapter import BuildResult +from nwb_linkml.adapters.adapter import BuildResult, is_1d, is_compound, has_attrs from nwb_linkml.adapters.array import ArrayAdapter from nwb_linkml.adapters.classes import ClassAdapter from nwb_linkml.maps import QUANTITY_MAP, Map -from nwb_linkml.maps.dtype import flat_to_linkml +from nwb_linkml.maps.dtype import flat_to_linkml, handle_dtype from nwb_linkml.maps.naming import camel_to_snake -from nwb_schema_language import CompoundDtype, Dataset +from nwb_schema_language import Dataset class DatasetMap(Map): @@ -106,7 +106,7 @@ class MapScalar(DatasetMap): this_slot = SlotDefinition( name=cls.name, description=cls.doc, - range=ClassAdapter.handle_dtype(cls.dtype), + range=handle_dtype(cls.dtype), **QUANTITY_MAP[cls.quantity], ) res = BuildResult(slots=[this_slot]) @@ -203,9 +203,7 @@ class MapScalarAttributes(DatasetMap): """ Map to a scalar attribute with an adjoining "value" slot """ - value_slot = SlotDefinition( - name="value", range=ClassAdapter.handle_dtype(cls.dtype), required=True - ) + value_slot = SlotDefinition(name="value", range=handle_dtype(cls.dtype), required=True) res.classes[0].attributes["value"] = value_slot return res @@ -271,7 +269,7 @@ class MapListlike(DatasetMap): * - ``dtype`` - ``Class`` """ - dtype = ClassAdapter.handle_dtype(cls.dtype) + dtype = handle_dtype(cls.dtype) return ( cls.neurodata_type_inc != "VectorData" and is_1d(cls) @@ -289,7 +287,7 @@ class MapListlike(DatasetMap): slot = SlotDefinition( name="value", multivalued=True, - range=ClassAdapter.handle_dtype(cls.dtype), + range=handle_dtype(cls.dtype), description=cls.doc, required=cls.quantity not in ("*", "?"), annotations=[{"source_type": "reference"}], @@ -378,7 +376,7 @@ class MapArraylike(DatasetMap): - ``False`` """ - dtype = ClassAdapter.handle_dtype(cls.dtype) + dtype = handle_dtype(cls.dtype) return ( cls.name and (all([cls.dims, cls.shape]) or cls.neurodata_type_inc == "VectorData") @@ -409,7 +407,7 @@ class MapArraylike(DatasetMap): SlotDefinition( name=name, multivalued=False, - range=ClassAdapter.handle_dtype(cls.dtype), + range=handle_dtype(cls.dtype), description=cls.doc, required=cls.quantity not in ("*", "?"), **expressions, @@ -513,7 +511,7 @@ class MapArrayLikeAttributes(DatasetMap): """ Check that we're an array with some additional metadata """ - dtype = ClassAdapter.handle_dtype(cls.dtype) + dtype = handle_dtype(cls.dtype) return ( all([cls.dims, cls.shape]) and cls.neurodata_type_inc != "VectorData" @@ -532,9 +530,7 @@ class MapArrayLikeAttributes(DatasetMap): array_adapter = ArrayAdapter(cls.dims, cls.shape) expressions = array_adapter.make_slot() # make a slot for the arraylike class - array_slot = SlotDefinition( - name="value", range=ClassAdapter.handle_dtype(cls.dtype), **expressions - ) + array_slot = SlotDefinition(name="value", range=handle_dtype(cls.dtype), **expressions) res.classes[0].attributes.update({"value": array_slot}) return res @@ -596,7 +592,7 @@ class 
MapVectorClassRange(DatasetMap): Check that we are a VectorData object without any additional attributes with a dtype that refers to another class """ - dtype = ClassAdapter.handle_dtype(cls.dtype) + dtype = handle_dtype(cls.dtype) return ( cls.neurodata_type_inc == "VectorData" and cls.name @@ -617,7 +613,7 @@ class MapVectorClassRange(DatasetMap): name=cls.name, description=cls.doc, multivalued=True, - range=ClassAdapter.handle_dtype(cls.dtype), + range=handle_dtype(cls.dtype), required=cls.quantity not in ("*", "?"), ) res = BuildResult(slots=[this_slot]) @@ -672,7 +668,7 @@ class MapVectorClassRange(DatasetMap): # this_slot = SlotDefinition( # name=cls.name, # description=cls.doc, -# range=ClassAdapter.handle_dtype(cls.dtype), +# range=handle_dtype(cls.dtype), # multivalued=True, # ) # # No need to make a class for us, so we replace the existing build results @@ -783,7 +779,7 @@ class MapCompoundDtype(DatasetMap): slots[a_dtype.name] = SlotDefinition( name=a_dtype.name, description=a_dtype.doc, - range=ClassAdapter.handle_dtype(a_dtype.dtype), + range=handle_dtype(a_dtype.dtype), **QUANTITY_MAP[cls.quantity], ) res.classes[0].attributes.update(slots) @@ -836,36 +832,3 @@ class DatasetAdapter(ClassAdapter): return None else: return matches[0] - - -def is_1d(cls: Dataset) -> bool: - """ - Check if the values of a dataset are 1-dimensional. - - Specifically: - * a single-layer dim/shape list of length 1, or - * a nested dim/shape list where every nested spec is of length 1 - """ - return ( - not any([isinstance(dim, list) for dim in cls.dims]) and len(cls.dims) == 1 - ) or ( # nested list - all([isinstance(dim, list) for dim in cls.dims]) - and len(cls.dims) == 1 - and len(cls.dims[0]) == 1 - ) - - -def is_compound(cls: Dataset) -> bool: - """Check if dataset has a compound dtype""" - return ( - isinstance(cls.dtype, list) - and len(cls.dtype) > 0 - and isinstance(cls.dtype[0], CompoundDtype) - ) - - -def has_attrs(cls: Dataset) -> bool: - """ - Check if a dataset has any attributes at all without defaults - """ - return len(cls.attributes) > 0 and all([not a.value for a in cls.attributes]) diff --git a/nwb_linkml/src/nwb_linkml/maps/dtype.py b/nwb_linkml/src/nwb_linkml/maps/dtype.py index 9a7756f..0cc7c79 100644 --- a/nwb_linkml/src/nwb_linkml/maps/dtype.py +++ b/nwb_linkml/src/nwb_linkml/maps/dtype.py @@ -7,6 +7,7 @@ from typing import Any, Type import nptyping import numpy as np +from nwb_schema_language import CompoundDtype, DTypeType, FlatDtype, ReferenceDtype flat_to_linkml = { "float": "float", @@ -185,3 +186,34 @@ def struct_from_dtype(dtype: np.dtype) -> Type[nptyping.Structure]: struct_pieces = [f"{k}: {flat_to_nptyping[v[0].name]}" for k, v in dtype.fields.items()] struct_dtype = ", ".join(struct_pieces) return nptyping.Structure[struct_dtype] + + +def handle_dtype(dtype: DTypeType | None) -> str: + """ + Get the string form of a dtype + + Args: + dtype (:class:`.DTypeType`): Dtype to stringify + + Returns: + str + """ + if isinstance(dtype, ReferenceDtype): + return dtype.target_type + elif dtype is None or dtype == []: + # Some ill-defined datasets are "abstract" despite that not being in the schema language + return "AnyType" + elif isinstance(dtype, FlatDtype): + return dtype.value + elif isinstance(dtype, list) and isinstance(dtype[0], CompoundDtype): + # there is precisely one class that uses compound dtypes: + # TimeSeriesReferenceVectorData + # compoundDtypes are able to define a ragged table according to the schema + # but are used in this single case equivalently to 
attributes. + # so we'll... uh... treat them as slots. + # TODO + return "AnyType" + + else: + # flat dtype + return dtype diff --git a/scripts/generate_core.py b/scripts/generate_core.py index 826bf30..6cbb83f 100644 --- a/scripts/generate_core.py +++ b/scripts/generate_core.py @@ -1,6 +1,8 @@ import shutil import os +import sys import traceback +from pdb import post_mortem from argparse import ArgumentParser from pathlib import Path @@ -53,6 +55,7 @@ def generate_versions( dry_run: bool = False, repo: GitRepo = NWB_CORE_REPO, hdmf_only=False, + pdb=False, ): """ Generate linkml models for all versions @@ -128,6 +131,11 @@ def generate_versions( build_progress.update(pydantic_task, action="Built Pydantic") except Exception as e: + if pdb: + live.stop() + post_mortem() + sys.exit(1) + build_progress.stop_task(linkml_task) if linkml_task is not None: build_progress.update(linkml_task, action="[bold red]LinkML Build Failed") @@ -205,6 +213,7 @@ def parser() -> ArgumentParser: ), action="store_true", ) + parser.add_argument("--pdb", help="Launch debugger on an error", action="store_true") return parser @@ -222,7 +231,7 @@ def main(): generate_core_yaml(args.yaml, args.dry_run, args.hdmf) generate_core_pydantic(args.yaml, args.pydantic, args.dry_run) else: - generate_versions(args.yaml, args.pydantic, args.dry_run, repo, args.hdmf) + generate_versions(args.yaml, args.pydantic, args.dry_run, repo, args.hdmf, pdb=args.pdb) if __name__ == "__main__": From e5d1cc52de760667a548550da8d424256c67de23 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 5 Aug 2024 18:41:53 -0700 Subject: [PATCH 17/61] regenerate models --- .../pydantic/core/v2_2_0/core_nwb_base.py | 33 +- .../pydantic/core/v2_2_0/core_nwb_behavior.py | 12 +- .../pydantic/core/v2_2_0/core_nwb_ecephys.py | 24 +- .../pydantic/core/v2_2_0/core_nwb_epoch.py | 12 +- .../pydantic/core/v2_2_0/core_nwb_file.py | 15 +- .../pydantic/core/v2_2_0/core_nwb_icephys.py | 159 ++++++---- .../pydantic/core/v2_2_0/core_nwb_image.py | 40 ++- .../pydantic/core/v2_2_0/core_nwb_misc.py | 64 ++-- .../pydantic/core/v2_2_0/core_nwb_ogen.py | 9 +- .../pydantic/core/v2_2_0/core_nwb_ophys.py | 36 ++- .../core/v2_2_0/core_nwb_retinotopy.py | 46 ++- .../pydantic/core/v2_2_1/core_nwb_base.py | 33 +- .../pydantic/core/v2_2_1/core_nwb_behavior.py | 12 +- .../pydantic/core/v2_2_1/core_nwb_ecephys.py | 24 +- .../pydantic/core/v2_2_1/core_nwb_epoch.py | 12 +- .../pydantic/core/v2_2_1/core_nwb_file.py | 15 +- .../pydantic/core/v2_2_1/core_nwb_icephys.py | 159 ++++++---- .../pydantic/core/v2_2_1/core_nwb_image.py | 40 ++- .../pydantic/core/v2_2_1/core_nwb_misc.py | 64 ++-- .../pydantic/core/v2_2_1/core_nwb_ogen.py | 9 +- .../pydantic/core/v2_2_1/core_nwb_ophys.py | 36 ++- .../core/v2_2_1/core_nwb_retinotopy.py | 46 ++- .../pydantic/core/v2_2_2/core_nwb_base.py | 33 +- .../pydantic/core/v2_2_2/core_nwb_behavior.py | 12 +- .../pydantic/core/v2_2_2/core_nwb_ecephys.py | 24 +- .../pydantic/core/v2_2_2/core_nwb_epoch.py | 12 +- .../pydantic/core/v2_2_2/core_nwb_file.py | 15 +- .../pydantic/core/v2_2_2/core_nwb_icephys.py | 159 ++++++---- .../pydantic/core/v2_2_2/core_nwb_image.py | 40 ++- .../pydantic/core/v2_2_2/core_nwb_misc.py | 64 ++-- .../pydantic/core/v2_2_2/core_nwb_ogen.py | 9 +- .../pydantic/core/v2_2_2/core_nwb_ophys.py | 36 ++- .../core/v2_2_2/core_nwb_retinotopy.py | 76 ++--- .../pydantic/core/v2_2_4/core_nwb_base.py | 33 +- .../pydantic/core/v2_2_4/core_nwb_behavior.py | 12 +- .../pydantic/core/v2_2_4/core_nwb_ecephys.py | 24 +- 
.../pydantic/core/v2_2_4/core_nwb_epoch.py | 12 +- .../pydantic/core/v2_2_4/core_nwb_file.py | 19 +- .../pydantic/core/v2_2_4/core_nwb_icephys.py | 159 ++++++---- .../pydantic/core/v2_2_4/core_nwb_image.py | 40 ++- .../pydantic/core/v2_2_4/core_nwb_misc.py | 64 ++-- .../pydantic/core/v2_2_4/core_nwb_ogen.py | 9 +- .../pydantic/core/v2_2_4/core_nwb_ophys.py | 56 ++-- .../core/v2_2_4/core_nwb_retinotopy.py | 76 ++--- .../pydantic/core/v2_2_5/core_nwb_base.py | 33 +- .../pydantic/core/v2_2_5/core_nwb_behavior.py | 12 +- .../pydantic/core/v2_2_5/core_nwb_ecephys.py | 24 +- .../pydantic/core/v2_2_5/core_nwb_epoch.py | 12 +- .../pydantic/core/v2_2_5/core_nwb_file.py | 19 +- .../pydantic/core/v2_2_5/core_nwb_icephys.py | 159 ++++++---- .../pydantic/core/v2_2_5/core_nwb_image.py | 40 ++- .../pydantic/core/v2_2_5/core_nwb_misc.py | 64 ++-- .../pydantic/core/v2_2_5/core_nwb_ogen.py | 9 +- .../pydantic/core/v2_2_5/core_nwb_ophys.py | 56 ++-- .../core/v2_2_5/core_nwb_retinotopy.py | 76 ++--- .../pydantic/core/v2_3_0/core_nwb_base.py | 33 +- .../pydantic/core/v2_3_0/core_nwb_behavior.py | 12 +- .../pydantic/core/v2_3_0/core_nwb_ecephys.py | 24 +- .../pydantic/core/v2_3_0/core_nwb_epoch.py | 12 +- .../pydantic/core/v2_3_0/core_nwb_file.py | 19 +- .../pydantic/core/v2_3_0/core_nwb_icephys.py | 160 ++++++---- .../pydantic/core/v2_3_0/core_nwb_image.py | 40 ++- .../pydantic/core/v2_3_0/core_nwb_misc.py | 64 ++-- .../pydantic/core/v2_3_0/core_nwb_ogen.py | 9 +- .../pydantic/core/v2_3_0/core_nwb_ophys.py | 56 ++-- .../core/v2_3_0/core_nwb_retinotopy.py | 76 ++--- .../pydantic/core/v2_4_0/core_nwb_base.py | 37 ++- .../pydantic/core/v2_4_0/core_nwb_behavior.py | 12 +- .../pydantic/core/v2_4_0/core_nwb_ecephys.py | 24 +- .../pydantic/core/v2_4_0/core_nwb_epoch.py | 12 +- .../pydantic/core/v2_4_0/core_nwb_file.py | 19 +- .../pydantic/core/v2_4_0/core_nwb_icephys.py | 295 +++++++++++------- .../pydantic/core/v2_4_0/core_nwb_image.py | 40 ++- .../pydantic/core/v2_4_0/core_nwb_misc.py | 64 ++-- .../pydantic/core/v2_4_0/core_nwb_ogen.py | 9 +- .../pydantic/core/v2_4_0/core_nwb_ophys.py | 56 ++-- .../core/v2_4_0/core_nwb_retinotopy.py | 76 ++--- .../pydantic/core/v2_5_0/core_nwb_base.py | 37 ++- .../pydantic/core/v2_5_0/core_nwb_behavior.py | 12 +- .../pydantic/core/v2_5_0/core_nwb_ecephys.py | 24 +- .../pydantic/core/v2_5_0/core_nwb_epoch.py | 8 +- .../pydantic/core/v2_5_0/core_nwb_file.py | 19 +- .../pydantic/core/v2_5_0/core_nwb_icephys.py | 295 +++++++++++------- .../pydantic/core/v2_5_0/core_nwb_image.py | 40 ++- .../pydantic/core/v2_5_0/core_nwb_misc.py | 64 ++-- .../pydantic/core/v2_5_0/core_nwb_ogen.py | 9 +- .../pydantic/core/v2_5_0/core_nwb_ophys.py | 56 ++-- .../core/v2_5_0/core_nwb_retinotopy.py | 76 ++--- .../core/v2_6_0_alpha/core_nwb_base.py | 37 ++- .../core/v2_6_0_alpha/core_nwb_behavior.py | 12 +- .../core/v2_6_0_alpha/core_nwb_ecephys.py | 24 +- .../core/v2_6_0_alpha/core_nwb_epoch.py | 8 +- .../core/v2_6_0_alpha/core_nwb_file.py | 22 +- .../core/v2_6_0_alpha/core_nwb_icephys.py | 295 +++++++++++------- .../core/v2_6_0_alpha/core_nwb_image.py | 40 ++- .../core/v2_6_0_alpha/core_nwb_misc.py | 64 ++-- .../core/v2_6_0_alpha/core_nwb_ogen.py | 9 +- .../core/v2_6_0_alpha/core_nwb_ophys.py | 65 ++-- .../core/v2_6_0_alpha/core_nwb_retinotopy.py | 76 ++--- .../pydantic/core/v2_7_0/core_nwb_base.py | 37 ++- .../pydantic/core/v2_7_0/core_nwb_behavior.py | 12 +- .../pydantic/core/v2_7_0/core_nwb_ecephys.py | 24 +- .../pydantic/core/v2_7_0/core_nwb_epoch.py | 8 +- .../pydantic/core/v2_7_0/core_nwb_file.py | 22 
+-
 .../pydantic/core/v2_7_0/core_nwb_icephys.py | 295 +++++++++++-------
 .../pydantic/core/v2_7_0/core_nwb_image.py | 40 ++-
 .../pydantic/core/v2_7_0/core_nwb_misc.py | 64 ++--
 .../pydantic/core/v2_7_0/core_nwb_ogen.py | 9 +-
 .../pydantic/core/v2_7_0/core_nwb_ophys.py | 65 ++--
 .../core/v2_7_0/core_nwb_retinotopy.py | 76 ++---
 .../hdmf_common/v1_1_0/hdmf_common_sparse.py | 10 +-
 .../hdmf_common/v1_1_0/hdmf_common_table.py | 28 +-
 .../hdmf_common/v1_1_2/hdmf_common_sparse.py | 10 +-
 .../hdmf_common/v1_1_2/hdmf_common_table.py | 28 +-
 .../hdmf_common/v1_1_3/hdmf_common_sparse.py | 10 +-
 .../hdmf_common/v1_1_3/hdmf_common_table.py | 28 +-
 .../hdmf_common/v1_5_0/hdmf_common_sparse.py | 4 +-
 .../hdmf_common/v1_5_0/hdmf_common_table.py | 36 +--
 .../hdmf_common/v1_8_0/hdmf_common_sparse.py | 4 +-
 .../hdmf_common/v1_8_0/hdmf_common_table.py | 36 +--
 .../v0_1_0/hdmf_experimental_experimental.py | 8 +-
 .../v0_5_0/hdmf_experimental_experimental.py | 8 +-
 .../linkml/core/v2_2_0/core.nwb.base.yaml | 16 +
 .../linkml/core/v2_2_0/core.nwb.behavior.yaml | 2 +
 .../linkml/core/v2_2_0/core.nwb.device.yaml | 2 +
 .../linkml/core/v2_2_0/core.nwb.ecephys.yaml | 2 +
 .../linkml/core/v2_2_0/core.nwb.file.yaml | 4 +
 .../linkml/core/v2_2_0/core.nwb.icephys.yaml | 36 +++
 .../linkml/core/v2_2_0/core.nwb.image.yaml | 2 +
 .../linkml/core/v2_2_0/core.nwb.misc.yaml | 5 +
 .../linkml/core/v2_2_0/core.nwb.ophys.yaml | 10 +
 .../core/v2_2_0/core.nwb.retinotopy.yaml | 12 +
 .../linkml/core/v2_2_1/core.nwb.base.yaml | 16 +
 .../linkml/core/v2_2_1/core.nwb.behavior.yaml | 2 +
 .../linkml/core/v2_2_1/core.nwb.device.yaml | 2 +
 .../linkml/core/v2_2_1/core.nwb.ecephys.yaml | 2 +
 .../linkml/core/v2_2_1/core.nwb.file.yaml | 4 +
 .../linkml/core/v2_2_1/core.nwb.icephys.yaml | 36 +++
 .../linkml/core/v2_2_1/core.nwb.image.yaml | 2 +
 .../linkml/core/v2_2_1/core.nwb.misc.yaml | 5 +
 .../linkml/core/v2_2_1/core.nwb.ophys.yaml | 10 +
 .../core/v2_2_1/core.nwb.retinotopy.yaml | 12 +
 .../linkml/core/v2_2_2/core.nwb.base.yaml | 16 +
 .../linkml/core/v2_2_2/core.nwb.behavior.yaml | 2 +
 .../linkml/core/v2_2_2/core.nwb.device.yaml | 2 +
 .../linkml/core/v2_2_2/core.nwb.ecephys.yaml | 2 +
 .../linkml/core/v2_2_2/core.nwb.file.yaml | 4 +
 .../linkml/core/v2_2_2/core.nwb.icephys.yaml | 36 +++
 .../linkml/core/v2_2_2/core.nwb.image.yaml | 2 +
 .../linkml/core/v2_2_2/core.nwb.misc.yaml | 5 +
 .../linkml/core/v2_2_2/core.nwb.ophys.yaml | 10 +
 .../core/v2_2_2/core.nwb.retinotopy.yaml | 37 +++
 .../linkml/core/v2_2_4/core.nwb.base.yaml | 16 +
 .../linkml/core/v2_2_4/core.nwb.behavior.yaml | 2 +
 .../linkml/core/v2_2_4/core.nwb.device.yaml | 2 +
 .../linkml/core/v2_2_4/core.nwb.ecephys.yaml | 2 +
 .../linkml/core/v2_2_4/core.nwb.file.yaml | 5 +
 .../linkml/core/v2_2_4/core.nwb.icephys.yaml | 36 +++
 .../linkml/core/v2_2_4/core.nwb.image.yaml | 2 +
 .../linkml/core/v2_2_4/core.nwb.misc.yaml | 5 +
 .../linkml/core/v2_2_4/core.nwb.ophys.yaml | 10 +
 .../core/v2_2_4/core.nwb.retinotopy.yaml | 37 +++
 .../linkml/core/v2_2_5/core.nwb.base.yaml | 16 +
 .../linkml/core/v2_2_5/core.nwb.behavior.yaml | 2 +
 .../linkml/core/v2_2_5/core.nwb.device.yaml | 2 +
 .../linkml/core/v2_2_5/core.nwb.ecephys.yaml | 2 +
 .../linkml/core/v2_2_5/core.nwb.file.yaml | 5 +
 .../linkml/core/v2_2_5/core.nwb.icephys.yaml | 36 +++
 .../linkml/core/v2_2_5/core.nwb.image.yaml | 2 +
 .../linkml/core/v2_2_5/core.nwb.misc.yaml | 5 +
 .../linkml/core/v2_2_5/core.nwb.ophys.yaml | 10 +
 .../core/v2_2_5/core.nwb.retinotopy.yaml | 37 +++
 .../linkml/core/v2_3_0/core.nwb.base.yaml | 17 +
 .../linkml/core/v2_3_0/core.nwb.behavior.yaml | 2 +
 .../linkml/core/v2_3_0/core.nwb.device.yaml | 2 +
 .../linkml/core/v2_3_0/core.nwb.ecephys.yaml | 3 +
 .../linkml/core/v2_3_0/core.nwb.file.yaml | 5 +
 .../linkml/core/v2_3_0/core.nwb.icephys.yaml | 39 +++
 .../linkml/core/v2_3_0/core.nwb.image.yaml | 2 +
 .../linkml/core/v2_3_0/core.nwb.misc.yaml | 5 +
 .../linkml/core/v2_3_0/core.nwb.ophys.yaml | 10 +
 .../core/v2_3_0/core.nwb.retinotopy.yaml | 37 +++
 .../linkml/core/v2_4_0/core.nwb.base.yaml | 17 +
 .../linkml/core/v2_4_0/core.nwb.behavior.yaml | 2 +
 .../linkml/core/v2_4_0/core.nwb.device.yaml | 2 +
 .../linkml/core/v2_4_0/core.nwb.ecephys.yaml | 3 +
 .../linkml/core/v2_4_0/core.nwb.file.yaml | 5 +
 .../linkml/core/v2_4_0/core.nwb.icephys.yaml | 59 ++++
 .../linkml/core/v2_4_0/core.nwb.image.yaml | 2 +
 .../linkml/core/v2_4_0/core.nwb.misc.yaml | 5 +
 .../linkml/core/v2_4_0/core.nwb.ophys.yaml | 10 +
 .../core/v2_4_0/core.nwb.retinotopy.yaml | 37 +++
 .../linkml/core/v2_5_0/core.nwb.base.yaml | 18 ++
 .../linkml/core/v2_5_0/core.nwb.behavior.yaml | 2 +
 .../linkml/core/v2_5_0/core.nwb.device.yaml | 2 +
 .../linkml/core/v2_5_0/core.nwb.ecephys.yaml | 3 +
 .../linkml/core/v2_5_0/core.nwb.file.yaml | 5 +
 .../linkml/core/v2_5_0/core.nwb.icephys.yaml | 59 ++++
 .../linkml/core/v2_5_0/core.nwb.image.yaml | 2 +
 .../linkml/core/v2_5_0/core.nwb.misc.yaml | 5 +
 .../linkml/core/v2_5_0/core.nwb.ophys.yaml | 10 +
 .../core/v2_5_0/core.nwb.retinotopy.yaml | 37 +++
 .../core/v2_6_0_alpha/core.nwb.base.yaml | 18 ++
 .../core/v2_6_0_alpha/core.nwb.behavior.yaml | 2 +
 .../core/v2_6_0_alpha/core.nwb.device.yaml | 2 +
 .../core/v2_6_0_alpha/core.nwb.ecephys.yaml | 3 +
 .../core/v2_6_0_alpha/core.nwb.file.yaml | 7 +
 .../core/v2_6_0_alpha/core.nwb.icephys.yaml | 59 ++++
 .../core/v2_6_0_alpha/core.nwb.image.yaml | 2 +
 .../core/v2_6_0_alpha/core.nwb.misc.yaml | 5 +
 .../core/v2_6_0_alpha/core.nwb.ophys.yaml | 16 +
 .../v2_6_0_alpha/core.nwb.retinotopy.yaml | 37 +++
 .../linkml/core/v2_7_0/core.nwb.base.yaml | 18 ++
 .../linkml/core/v2_7_0/core.nwb.behavior.yaml | 2 +
 .../linkml/core/v2_7_0/core.nwb.device.yaml | 2 +
 .../linkml/core/v2_7_0/core.nwb.ecephys.yaml | 3 +
 .../linkml/core/v2_7_0/core.nwb.file.yaml | 7 +
 .../linkml/core/v2_7_0/core.nwb.icephys.yaml | 59 ++++
 .../linkml/core/v2_7_0/core.nwb.image.yaml | 2 +
 .../linkml/core/v2_7_0/core.nwb.misc.yaml | 5 +
 .../linkml/core/v2_7_0/core.nwb.ophys.yaml | 16 +
 .../core/v2_7_0/core.nwb.retinotopy.yaml | 37 +++
 .../v1_1_0/hdmf-common.sparse.yaml | 6 +
 .../hdmf_common/v1_1_0/hdmf-common.table.yaml | 8 +
 .../v1_1_2/hdmf-common.sparse.yaml | 6 +
 .../hdmf_common/v1_1_2/hdmf-common.table.yaml | 8 +
 .../v1_1_3/hdmf-common.sparse.yaml | 6 +
 .../hdmf_common/v1_1_3/hdmf-common.table.yaml | 8 +
 .../v1_5_0/hdmf-common.sparse.yaml | 2 +
 .../hdmf_common/v1_5_0/hdmf-common.table.yaml | 7 +
 .../v1_8_0/hdmf-common.sparse.yaml | 2 +
 .../hdmf_common/v1_8_0/hdmf-common.table.yaml | 7 +
 .../hdmf-experimental.experimental.yaml | 1 +
 .../hdmf-experimental.experimental.yaml | 1 +
 234 files changed, 4772 insertions(+), 2222 deletions(-)

diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py
index 7557e3c..99b5906 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py
@@ -130,10 +130,15 @@ class TimeSeries(NWBDataInterface):
     )
     name: str = Field(...)
-    description: Optional[str] = Field(None, description="""Description of the time series.""")
+    description: Optional[str] = Field(
+        "no description",
+        description="""Description of the time series.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+    )
     comments: Optional[str] = Field(
-        None,
+        "no comments",
         description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
     )
     data: TimeSeriesData = Field(
         ...,
@@ -178,15 +183,17 @@ class TimeSeriesData(ConfiguredBaseModel):
         json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
     )
     conversion: Optional[float] = Field(
-        None,
+        1.0,
         description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
     )
     resolution: Optional[float] = Field(
-        None,
+        -1.0,
         description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
     )
-    unit: Optional[str] = Field(
-        None,
+    unit: str = Field(
+        ...,
         description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""",
     )
     value: Optional[
@@ -212,9 +219,13 @@ class TimeSeriesStartingTime(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"}
         },
     )
-    rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""")
-    unit: Optional[str] = Field(
-        None, description="""Unit of measurement for time, which is fixed to 'seconds'."""
+    rate: float = Field(..., description="""Sampling rate, in Hz.""")
+    unit: Literal["seconds"] = Field(
+        "seconds",
+        description="""Unit of measurement for time, which is fixed to 'seconds'.""",
+        json_schema_extra={
+            "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"}
+        },
     )
     value: float = Field(...)
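The hunks above show the pattern that recurs throughout the regenerated models: wherever the schema declares an `ifabsent` value, the field now carries that value as a real Python default (with the LinkML metadata preserved in `json_schema_extra`), while fields with no sensible default, like `unit`, become required. A minimal sketch of how such a generated field behaves, assuming pydantic v2; the class here is a trimmed, hypothetical stand-in mirroring the diff, not the generated model itself:

```python
from typing import Optional

from pydantic import BaseModel, Field


class TimeSeriesData(BaseModel):
    """Trimmed, hypothetical stand-in for the generated model above."""

    conversion: Optional[float] = Field(
        1.0,  # previously None; now the ifabsent default from the schema
        json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}},
    )
    resolution: Optional[float] = Field(
        -1.0,  # ifabsent: float(-1.0); -1.0 means "unknown" per the schema docs
        json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}},
    )
    unit: str = Field(...)  # no ifabsent default exists, so the field is required


data = TimeSeriesData(unit="volts")
assert data.conversion == 1.0 and data.resolution == -1.0  # defaults applied
```

Carrying `ifabsent` into both the default and `json_schema_extra` keeps the emitted JSON Schema aligned with the LinkML source while giving instantiated models usable values.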
@@ -260,9 +271,7 @@ class Images(NWBDataInterface): ) name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}}) - description: Optional[str] = Field( - None, description="""Description of this collection of images.""" - ) + description: str = Field(..., description="""Description of this collection of images.""") image: List[Image] = Field(..., description="""Images stored in this collection.""") diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py index 8d5d808..addd1ff 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py @@ -84,10 +84,15 @@ class SpatialSeries(TimeSeries): reference_frame: Optional[str] = Field( None, description="""Description defining what exactly 'straight-ahead' means.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -128,8 +133,9 @@ class SpatialSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py index 34f77fd..3e3205f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py @@ -130,10 +130,15 @@ class ElectricalSeries(TimeSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -198,10 +203,15 @@ class SpikeEventSeries(ElectricalSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -369,9 +379,9 @@ class ElectrodeGroup(NWBContainer): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") - location: Optional[str] = Field( - None, + description: str = Field(..., description="""Description of this electrode group.""") + location: str = Field( + ..., description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""", ) position: Optional[ElectrodeGroupPosition] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py index 8627b8a..d7ec41b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py @@ -150,13 +150,11 @@ class TimeIntervals(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -194,9 +192,7 @@ class TimeIntervalsTimeseries(VectorData): timeseries: Optional[TimeSeries] = Field( None, description="""the TimeSeries that this index applies to.""" ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py index 15fdadc..f0e65b0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py @@ -98,9 +98,10 @@ class NWBFile(NWBContainer): "root", json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}}, ) - nwb_version: Optional[str] = Field( - None, + nwb_version: Literal["2.1.0"] = Field( + "2.1.0", description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""", + json_schema_extra={"linkml_meta": {"equals_string": "2.1.0", "ifabsent": "string(2.1.0)"}}, ) file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., @@ -305,7 +306,7 @@ class GeneralSourceScript(ConfiguredBaseModel): "linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"} }, ) - file_name: Optional[str] = Field(None, description="""Name of script file.""") + file_name: str = Field(..., description="""Name of script file.""") value: str = Field(...) @@ -486,13 +487,11 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py index 1242dfc..8d42b4d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py @@ -107,8 +107,8 @@ class PatchClampSeries(TimeSeries): ) name: str = Field(...) 
- stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -127,10 +127,15 @@ class PatchClampSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -170,8 +175,8 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) value: Optional[NDArray[Shape["* num_times"], float]] = Field( @@ -195,8 +200,8 @@ class CurrentClampSeries(PatchClampSeries): capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -214,10 +219,15 @@ class CurrentClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -257,9 +267,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -280,8 +291,8 @@ class IZeroClampSeries(CurrentClampSeries): ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -299,10 +310,15 @@ class IZeroClampSeries(CurrentClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -342,8 +358,8 @@ class CurrentClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -361,10 +377,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -404,9 +425,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -443,8 +467,8 @@ class VoltageClampSeries(PatchClampSeries): whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = ( Field(None, description="""Whole cell series resistance compensation, in ohms.""") ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -462,10 +486,15 @@ class VoltageClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -505,9 +534,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -528,9 +560,12 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -551,9 +586,12 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -574,9 +612,10 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["hertz"] = Field( + "hertz", description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", + json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}}, ) value: float = Field(...) 
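The fixed-unit attributes in these icephys hunks follow a second recurring pattern: a free-form `Optional[str]` unit is narrowed to a `Literal` whose only member is the schema's `equals_string` value, with that same value as the default. A short sketch of the resulting behavior, assuming pydantic v2; `CapacitanceCompensation` is a hypothetical stand-in for generated classes like `VoltageClampSeriesCapacitanceFast`:

```python
from typing import Literal

from pydantic import BaseModel, Field, ValidationError


class CapacitanceCompensation(BaseModel):
    """Hypothetical stand-in for the generated fixed-unit classes."""

    unit: Literal["farads"] = Field(
        "farads",  # default and sole permitted value
        json_schema_extra={
            "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
        },
    )
    value: float = Field(...)


comp = CapacitanceCompensation(value=1e-12)  # unit defaults to "farads"
try:
    CapacitanceCompensation(unit="volts", value=1.0)
except ValidationError:
    pass  # any unit other than the fixed literal is rejected
```

Because the literal has a single member, the field is effectively constant: it validates and serializes the fixed unit without needing a custom validator.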
@@ -597,9 +636,12 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -620,9 +662,12 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -643,9 +688,12 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -666,9 +714,10 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["ohms"] = Field( + "ohms", description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", + json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}}, ) value: float = Field(...) @@ -684,8 +733,8 @@ class VoltageClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -703,10 +752,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -746,9 +800,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""",
+        json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
     )
     value: Any = Field(...)

@@ -824,13 +879,11 @@ class SweepTable(DynamicTable):
             }
         },
     )
-    colnames: Optional[str] = Field(
-        None,
+    colnames: List[str] = Field(
+        ...,
         description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
     )
-    description: Optional[str] = Field(
-        None, description="""Description of what is in this dynamic table."""
-    )
+    description: str = Field(..., description="""Description of what is in this dynamic table.""")
     id: NDArray[Shape["* num_rows"], int] = Field(
         ...,
         description="""Array of unique identifiers for the rows of this dynamic table.""",
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py
index 8f16e8e..77e40f9 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py
@@ -159,10 +159,15 @@ class ImageSeries(TimeSeries):
         None,
         description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""",
     )
-    description: Optional[str] = Field(None, description="""Description of the time series.""")
+    description: Optional[str] = Field(
+        "no description",
+        description="""Description of the time series.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+    )
     comments: Optional[str] = Field(
-        None,
+        "no comments",
         description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
     )
     starting_time: Optional[TimeSeriesStartingTime] = Field(
         None,
@@ -204,8 +209,8 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
         },
     )
-    starting_frame: Optional[int] = Field(
-        None,
+    starting_frame: List[int] = Field(
+        ...,
         description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""",
     )
     value: Optional[NDArray[Shape["* num_files"], str]] = Field(
         None,
@@ -251,10 +256,15 @@ class ImageMaskSeries(ImageSeries):
         None,
         description="""Format of image.
If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -324,10 +334,15 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -380,10 +395,15 @@ class IndexSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py index 9c79866..3a9affd 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py @@ -120,10 +120,15 @@ class AbstractFeatureSeries(TimeSeries): description="""Description of the features represented in TimeSeries::data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -164,8 +169,9 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "see 'feature_units'", description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}}, ) value: Optional[ Union[ @@ -190,10 +196,15 @@ class AnnotationSeries(TimeSeries): description="""Annotations made during an experiment.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -237,10 +248,15 @@ class IntervalSeries(TimeSeries): description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -296,10 +312,15 @@ class DecompositionSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -339,9 +360,10 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, @@ -403,13 +425,11 @@ class DecompositionSeriesBands(DynamicTable): description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -513,13 +533,11 @@ class Units(DynamicTable): NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -550,9 +568,7 @@ class UnitsSpikeTimes(VectorData): None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py index f75e675..4a46e2c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py @@ -91,10 +91,15 @@ class OptogeneticSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py index 062517f..8a134f9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py @@ -146,10 +146,15 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. 
If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -203,10 +208,15 @@ class RoiResponseSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -342,12 +352,14 @@ class ImagingPlaneManifold(ConfiguredBaseModel): }, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ @@ -370,8 +382,10 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel): "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for origin_coords. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( None, @@ -401,8 +415,10 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel): "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for grid_spacing. 
The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for grid_spacing. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py index dcb153d..5cbf6b0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py @@ -96,11 +96,11 @@ class RetinotopyMap(NWBData): ) name: str = Field(...) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -119,20 +119,18 @@ class AxisMap(RetinotopyMap): ) name: str = Field(...) - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") class RetinotopyImage(GrayscaleImage): @@ -145,18 +143,16 @@ class RetinotopyImage(GrayscaleImage): ) name: str = Field(...) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + format: str = Field(..., description="""Format of image. 
Right now only 'raw' is supported.""") resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) @@ -286,19 +282,17 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage): } }, ) - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") - bits_per_pixel: Optional[int] = Field( - None, + focal_depth: float = Field(..., description="""Focal depth offset, in meters.""") + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py index 228d72f..c8c1162 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py @@ -130,10 +130,15 @@ class TimeSeries(NWBDataInterface): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) data: TimeSeriesData = Field( ..., @@ -178,15 +183,17 @@ class TimeSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) resolution: Optional[float] = Field( - None, + -1.0, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) value: Optional[ @@ -212,9 +219,13 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") - unit: Optional[str] = Field( - None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" + rate: float = Field(..., description="""Sampling rate, in Hz.""") + unit: Literal["seconds"] = Field( + "seconds", + description="""Unit of measurement for time, which is fixed to 'seconds'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"} + }, ) value: float = Field(...) @@ -260,9 +271,7 @@ class Images(NWBDataInterface): ) name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}}) - description: Optional[str] = Field( - None, description="""Description of this collection of images.""" - ) + description: str = Field(..., description="""Description of this collection of images.""") image: List[Image] = Field(..., description="""Images stored in this collection.""") diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py index 25d55da..45af85e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py @@ -84,10 +84,15 @@ class SpatialSeries(TimeSeries): reference_frame: Optional[str] = Field( None, description="""Description defining what exactly 'straight-ahead' means.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -128,8 +133,9 @@ class SpatialSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py index eaee082..d337bbe 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py @@ -130,10 +130,15 @@ class ElectricalSeries(TimeSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -198,10 +203,15 @@ class SpikeEventSeries(ElectricalSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -369,9 +379,9 @@ class ElectrodeGroup(NWBContainer): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") - location: Optional[str] = Field( - None, + description: str = Field(..., description="""Description of this electrode group.""") + location: str = Field( + ..., description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""", ) position: Optional[ElectrodeGroupPosition] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py index b80640c..cfd4e53 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py @@ -150,13 +150,11 @@ class TimeIntervals(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -194,9 +192,7 @@ class TimeIntervalsTimeseries(VectorData): timeseries: Optional[TimeSeries] = Field( None, description="""the TimeSeries that this index applies to.""" ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py index 092eaf4..a17f8e1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py @@ -98,9 +98,10 @@ class NWBFile(NWBContainer): "root", json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}}, ) - nwb_version: Optional[str] = Field( - None, + nwb_version: Literal["2.2.1"] = Field( + "2.2.1", description="""File version string. Use semantic versioning, e.g. 1.2.1. 
This will be the name of the format with trailing major, minor and patch numbers.""", + json_schema_extra={"linkml_meta": {"equals_string": "2.2.1", "ifabsent": "string(2.2.1)"}}, ) file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., @@ -305,7 +306,7 @@ class GeneralSourceScript(ConfiguredBaseModel): "linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"} }, ) - file_name: Optional[str] = Field(None, description="""Name of script file.""") + file_name: str = Field(..., description="""Name of script file.""") value: str = Field(...) @@ -486,13 +487,11 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py index 0109b14..5be0500 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py @@ -107,8 +107,8 @@ class PatchClampSeries(TimeSeries): ) name: str = Field(...) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -127,10 +127,15 @@ class PatchClampSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -170,8 +175,8 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) value: Optional[NDArray[Shape["* num_times"], float]] = Field( @@ -195,8 +200,8 @@ class CurrentClampSeries(PatchClampSeries): capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -214,10 +219,15 @@ class CurrentClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -257,9 +267,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -280,8 +291,8 @@ class IZeroClampSeries(CurrentClampSeries): ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -299,10 +310,15 @@ class IZeroClampSeries(CurrentClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -342,8 +358,8 @@ class CurrentClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -361,10 +377,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -404,9 +425,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -443,8 +467,8 @@ class VoltageClampSeries(PatchClampSeries): whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = ( Field(None, description="""Whole cell series resistance compensation, in ohms.""") ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -462,10 +486,15 @@ class VoltageClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -505,9 +534,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -528,9 +560,12 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -551,9 +586,12 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -574,9 +612,10 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["hertz"] = Field( + "hertz", description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", + json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}}, ) value: float = Field(...) @@ -597,9 +636,12 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -620,9 +662,12 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -643,9 +688,12 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) 
@@ -666,9 +714,10 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["ohms"] = Field( + "ohms", description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", + json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}}, ) value: float = Field(...) @@ -684,8 +733,8 @@ class VoltageClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -703,10 +752,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -746,9 +800,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -824,13 +879,11 @@ class SweepTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py index e831ea1..97e4aa8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py @@ -159,10 +159,15 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. 
If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -204,8 +209,8 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[int] = Field( - None, + starting_frame: List[int] = Field( + ..., description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) value: Optional[NDArray[Shape["* num_files"], str]] = Field( @@ -251,10 +256,15 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -324,10 +334,15 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -380,10 +395,15 @@ class IndexSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py index 19c418d..9f114c6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py @@ -120,10 +120,15 @@ class AbstractFeatureSeries(TimeSeries): description="""Description of the features represented in TimeSeries::data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -164,8 +169,9 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "see 'feature_units'", description="""Since there can be different units for different features, store the units in 'feature_units'. 
The default value for this attribute is \"see 'feature_units'\".""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}}, ) value: Optional[ Union[ @@ -190,10 +196,15 @@ class AnnotationSeries(TimeSeries): description="""Annotations made during an experiment.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -237,10 +248,15 @@ class IntervalSeries(TimeSeries): description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -296,10 +312,15 @@ class DecompositionSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -339,9 +360,10 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, @@ -403,13 +425,11 @@ class DecompositionSeriesBands(DynamicTable): description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -513,13 +533,11 @@ class Units(DynamicTable): NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -550,9 +568,7 @@ class UnitsSpikeTimes(VectorData): None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py index 37ff5c3..e94dead 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py @@ -91,10 +91,15 @@ class OptogeneticSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py index ba8d78a..6ad59b8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py @@ -146,10 +146,15 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -203,10 +208,15 @@ class RoiResponseSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -342,12 +352,14 @@ class ImagingPlaneManifold(ConfiguredBaseModel): }, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. 
The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ @@ -370,8 +382,10 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel): "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for origin_coords. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( None, @@ -401,8 +415,10 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel): "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for grid_spacing. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py index 98f0ede..2c68f1f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py @@ -96,11 +96,11 @@ class RetinotopyMap(NWBData): ) name: str = Field(...) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -119,20 +119,18 @@ class AxisMap(RetinotopyMap): ) name: str = Field(...) - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}, {"alias": "num_cols"}]}} }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") class RetinotopyImage(GrayscaleImage): @@ -145,18 +143,16 @@ class RetinotopyImage(GrayscaleImage): ) name: str = Field(...) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) @@ -286,19 +282,17 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage): } }, ) - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") - bits_per_pixel: Optional[int] = Field( - None, + focal_depth: float = Field(..., description="""Focal depth offset, in meters.""") + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py index 14eaf16..8dbf06a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py @@ -130,10 +130,15 @@ class TimeSeries(NWBDataInterface): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) data: TimeSeriesData = Field( ..., @@ -178,15 +183,17 @@ class TimeSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) resolution: Optional[float] = Field( - None, + -1.0, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) value: Optional[ @@ -212,9 +219,13 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") - unit: Optional[str] = Field( - None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" + rate: float = Field(..., description="""Sampling rate, in Hz.""") + unit: Literal["seconds"] = Field( + "seconds", + description="""Unit of measurement for time, which is fixed to 'seconds'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"} + }, ) value: float = Field(...) @@ -260,9 +271,7 @@ class Images(NWBDataInterface): ) name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}}) - description: Optional[str] = Field( - None, description="""Description of this collection of images.""" - ) + description: str = Field(..., description="""Description of this collection of images.""") image: List[Image] = Field(..., description="""Images stored in this collection.""") diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py index d19da8f..9984cf5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py @@ -84,10 +84,15 @@ class SpatialSeries(TimeSeries): reference_frame: Optional[str] = Field( None, description="""Description defining what exactly 'straight-ahead' means.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -128,8 +133,9 @@ class SpatialSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py index 06bad97..fa2e7d2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py @@ -130,10 +130,15 @@ class ElectricalSeries(TimeSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -198,10 +203,15 @@ class SpikeEventSeries(ElectricalSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -369,9 +379,9 @@ class ElectrodeGroup(NWBContainer): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") - location: Optional[str] = Field( - None, + description: str = Field(..., description="""Description of this electrode group.""") + location: str = Field( + ..., description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""", ) position: Optional[ElectrodeGroupPosition] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py index 11b17cf..0464db5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py @@ -150,13 +150,11 @@ class TimeIntervals(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -194,9 +192,7 @@ class TimeIntervalsTimeseries(VectorData): timeseries: Optional[TimeSeries] = Field( None, description="""the TimeSeries that this index applies to.""" ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py index 128b7a4..ef18f50 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py @@ -98,9 +98,10 @@ class NWBFile(NWBContainer): "root", json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}}, ) - nwb_version: Optional[str] = Field( - None, + nwb_version: Literal["2.2.2"] = Field( + "2.2.2", description="""File version string. Use semantic versioning, e.g. 1.2.1. 
This will be the name of the format with trailing major, minor and patch numbers.""", + json_schema_extra={"linkml_meta": {"equals_string": "2.2.2", "ifabsent": "string(2.2.2)"}}, ) file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., @@ -305,7 +306,7 @@ class GeneralSourceScript(ConfiguredBaseModel): "linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"} }, ) - file_name: Optional[str] = Field(None, description="""Name of script file.""") + file_name: str = Field(..., description="""Name of script file.""") value: str = Field(...) @@ -486,13 +487,11 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py index d48fcbd..29a500f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py @@ -107,8 +107,8 @@ class PatchClampSeries(TimeSeries): ) name: str = Field(...) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -127,10 +127,15 @@ class PatchClampSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -170,8 +175,8 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) value: Optional[NDArray[Shape["* num_times"], float]] = Field( @@ -195,8 +200,8 @@ class CurrentClampSeries(PatchClampSeries): capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -214,10 +219,15 @@ class CurrentClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -257,9 +267,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -280,8 +291,8 @@ class IZeroClampSeries(CurrentClampSeries): ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -299,10 +310,15 @@ class IZeroClampSeries(CurrentClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -342,8 +358,8 @@ class CurrentClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -361,10 +377,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -404,9 +425,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -443,8 +467,8 @@ class VoltageClampSeries(PatchClampSeries): whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = ( Field(None, description="""Whole cell series resistance compensation, in ohms.""") ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -462,10 +486,15 @@ class VoltageClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -505,9 +534,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -528,9 +560,12 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -551,9 +586,12 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -574,9 +612,10 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["hertz"] = Field( + "hertz", description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", + json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}}, ) value: float = Field(...) @@ -597,9 +636,12 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -620,9 +662,12 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -643,9 +688,12 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) 
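The `unit` hunks in this file all apply one pattern: a slot constrained by linkml's `equals_string` becomes a `Literal` field whose single member is also its default (via `ifabsent`). A minimal sketch of the resulting behavior, using a hypothetical stand-in model rather than any generated class:

```python
# Sketch only: `CapacitanceSketch` is a stand-in, not a generated model.
# equals_string -> Literal[...] pins the accepted value; ifabsent -> default.
from typing import Literal

from pydantic import BaseModel, ValidationError


class CapacitanceSketch(BaseModel):
    unit: Literal["farads"] = "farads"
    value: float


CapacitanceSketch(value=1.2e-12)  # validates; unit defaults to "farads"
try:
    CapacitanceSketch(unit="ohms", value=1.2e-12)
except ValidationError:
    print("rejected: unit must equal 'farads'")
```

Because the fixed value doubles as the default, a previously-optional `unit` attribute can be pinned to its one legal value without breaking instantiation of existing data that omits it.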
@@ -666,9 +714,10 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["ohms"] = Field( + "ohms", description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", + json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}}, ) value: float = Field(...) @@ -684,8 +733,8 @@ class VoltageClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -703,10 +752,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -746,9 +800,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -824,13 +879,11 @@ class SweepTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py index b161444..2bafac7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py @@ -159,10 +159,15 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. 
If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -204,8 +209,8 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[int] = Field( - None, + starting_frame: List[int] = Field( + ..., description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) value: Optional[NDArray[Shape["* num_files"], str]] = Field( None, json_schema_extra={ "linkml_meta": {"array": {"dimensions": [{"alias": "num_files"}]}} }, ) @@ -251,10 +256,15 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -322,10 +332,15 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -378,10 +393,15 @@ class IndexSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py index cfdfb58..7485685 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py @@ -120,10 +120,15 @@ class AbstractFeatureSeries(TimeSeries): description="""Description of the features represented in TimeSeries::data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -164,8 +169,9 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "see 'feature_units'", description="""Since there can be different units for different features, store the units in 'feature_units'. 
The default value for this attribute is \"see 'feature_units'\".""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}}, ) value: Optional[ Union[ @@ -190,10 +196,15 @@ class AnnotationSeries(TimeSeries): description="""Annotations made during an experiment.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -237,10 +248,15 @@ class IntervalSeries(TimeSeries): description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -296,10 +312,15 @@ class DecompositionSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -339,9 +360,10 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, @@ -403,13 +425,11 @@ class DecompositionSeriesBands(DynamicTable): description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -513,13 +533,11 @@ class Units(DynamicTable): NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -550,9 +568,7 @@ class UnitsSpikeTimes(VectorData): None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py index 2b0d4f5..cfd0933 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py @@ -91,10 +91,15 @@ class OptogeneticSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py index e4c1979..48ebb24 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py @@ -146,10 +146,15 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -203,10 +208,15 @@ class RoiResponseSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -342,12 +352,14 @@ class ImagingPlaneManifold(ConfiguredBaseModel): }, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. 
The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ @@ -370,8 +382,10 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel): "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for origin_coords. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( None, @@ -401,8 +415,10 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel): "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for grid_spacing. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py index c2d0c5f..976d7f2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py @@ -127,14 +127,12 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -159,14 +157,12 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -191,14 +187,12 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -223,14 +217,12 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -255,19 +247,17 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + focal_depth: float = Field(..., description="""Focal depth offset, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ @@ -289,11 +279,11 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -318,18 +308,16 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. 
This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py index bb4ef4e..4e8eeca 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py @@ -130,10 +130,15 @@ class TimeSeries(NWBDataInterface): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) data: TimeSeriesData = Field( ..., @@ -178,15 +183,17 @@ class TimeSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) resolution: Optional[float] = Field( - None, + -1.0, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) value: Optional[ @@ -212,9 +219,13 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") - unit: Optional[str] = Field( - None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" + rate: float = Field(..., description="""Sampling rate, in Hz.""") + unit: Literal["seconds"] = Field( + "seconds", + description="""Unit of measurement for time, which is fixed to 'seconds'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"} + }, ) value: float = Field(...) @@ -260,9 +271,7 @@ class Images(NWBDataInterface): ) name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}}) - description: Optional[str] = Field( - None, description="""Description of this collection of images.""" - ) + description: str = Field(..., description="""Description of this collection of images.""") image: List[Image] = Field(..., description="""Images stored in this collection.""") diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py index 093f0b8..9609061 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py @@ -84,10 +84,15 @@ class SpatialSeries(TimeSeries): reference_frame: Optional[str] = Field( None, description="""Description defining what exactly 'straight-ahead' means.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -128,8 +133,9 @@ class SpatialSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py index 0138a95..58080df 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py @@ -130,10 +130,15 @@ class ElectricalSeries(TimeSeries): description="""Channel-specific conversion factor. 
Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -198,10 +203,15 @@ class SpikeEventSeries(ElectricalSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -369,9 +379,9 @@ class ElectrodeGroup(NWBContainer): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") - location: Optional[str] = Field( - None, + description: str = Field(..., description="""Description of this electrode group.""") + location: str = Field( + ..., description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible.""", ) position: Optional[ElectrodeGroupPosition] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py index 60ec786..a086155 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py @@ -150,13 +150,11 @@ class TimeIntervals(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -194,9 +192,7 @@ class TimeIntervalsTimeseries(VectorData): timeseries: Optional[TimeSeries] = Field( None, description="""the TimeSeries that this index applies to.""" ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py index 0b716c0..4e54cb8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py @@ -96,9 +96,7 @@ class ScratchData(NWBData): ) name: str = Field(...) - notes: Optional[str] = Field( - None, description="""Any notes the user has about the dataset being stored""" - ) + notes: str = Field(..., description="""Any notes the user has about the dataset being stored""") class NWBFile(NWBContainer): @@ -114,9 +112,10 @@ class NWBFile(NWBContainer): "root", json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}}, ) - nwb_version: Optional[str] = Field( - None, + nwb_version: Literal["2.2.4"] = Field( + "2.2.4", description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""", + json_schema_extra={"linkml_meta": {"equals_string": "2.2.4", "ifabsent": "string(2.2.4)"}}, ) file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., @@ -321,7 +320,7 @@ class GeneralSourceScript(ConfiguredBaseModel): "linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"} }, ) - file_name: Optional[str] = Field(None, description="""Name of script file.""") + file_name: str = Field(..., description="""Name of script file.""") value: str = Field(...) @@ -464,13 +463,11 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py index 0af6814..0749cca 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py @@ -107,8 +107,8 @@ class PatchClampSeries(TimeSeries): ) name: str = Field(...) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -127,10 +127,15 @@ class PatchClampSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -170,8 +175,8 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) value: Optional[NDArray[Shape["* num_times"], float]] = Field( @@ -195,8 +200,8 @@ class CurrentClampSeries(PatchClampSeries): capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -214,10 +219,15 @@ class CurrentClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -257,9 +267,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -280,8 +291,8 @@ class IZeroClampSeries(CurrentClampSeries): ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -299,10 +310,15 @@ class IZeroClampSeries(CurrentClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -342,8 +358,8 @@ class CurrentClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -361,10 +377,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -404,9 +425,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -443,8 +467,8 @@ class VoltageClampSeries(PatchClampSeries): whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = ( Field(None, description="""Whole cell series resistance compensation, in ohms.""") ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -462,10 +486,15 @@ class VoltageClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -505,9 +534,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -528,9 +560,12 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) 
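The hunks above and below replace free-text `Optional[str]` unit attributes with fixed `Literal` values carrying `equals_string`/`ifabsent` metadata. A minimal sketch of how such a fixed-value field behaves at runtime, assuming pydantic v2; the class here is a hypothetical stand-in modeled on the generated `VoltageClampSeriesCapacitanceFast`, not part of this patch:

```python
from typing import Literal

from pydantic import BaseModel, Field, ValidationError


class CapacitanceFast(BaseModel):
    """Hypothetical stand-in for the generated fixed-unit dataset classes."""

    # Literal pins the attribute to its schema-fixed value; the default
    # mirrors the linkml `ifabsent: string(farads)` annotation.
    unit: Literal["farads"] = Field("farads")
    value: float = Field(...)


CapacitanceFast(value=3e-12)  # unit defaults to "farads"
try:
    CapacitanceFast(unit="ohms", value=3e-12)
except ValidationError:
    print("non-'farads' unit rejected")  # equals_string enforced via Literal
```
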
@@ -551,9 +586,12 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -574,9 +612,10 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["hertz"] = Field( + "hertz", description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", + json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}}, ) value: float = Field(...) @@ -597,9 +636,12 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -620,9 +662,12 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -643,9 +688,12 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -666,9 +714,10 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["ohms"] = Field( + "ohms", description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", + json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}}, ) value: float = Field(...) @@ -684,8 +733,8 @@ class VoltageClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -703,10 +752,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -746,9 +800,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -824,13 +879,11 @@ class SweepTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py index a1ababb..6d493d6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py @@ -159,10 +159,15 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -204,8 +209,8 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[int] = Field( - None, + starting_frame: List[int] = Field( + ..., description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. 
The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) value: Optional[NDArray[Shape["* num_files"], str]] = Field( @@ -251,10 +256,15 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -322,10 +332,15 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -378,10 +393,15 @@ class IndexSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py index 51b8cfc..fda23ec 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py @@ -120,10 +120,15 @@ class AbstractFeatureSeries(TimeSeries): description="""Description of the features represented in TimeSeries::data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -164,8 +169,9 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "see 'feature_units'", description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}}, ) value: Optional[ Union[ @@ -190,10 +196,15 @@ class AnnotationSeries(TimeSeries): description="""Annotations made during an experiment.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -237,10 +248,15 @@ class IntervalSeries(TimeSeries): description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -296,10 +312,15 @@ class DecompositionSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -339,9 +360,10 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, @@ -403,13 +425,11 @@ class DecompositionSeriesBands(DynamicTable): description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -513,13 +533,11 @@ class Units(DynamicTable): NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -550,9 +568,7 @@ class UnitsSpikeTimes(VectorData): None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py index 73f6642..9616063 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py @@ -91,10 +91,15 @@ class OptogeneticSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py index 3c2ebd8..01ecf48 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py @@ -151,10 +151,15 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. 
If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -208,10 +213,15 @@ class RoiResponseSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -345,13 +355,11 @@ class PlaneSegmentation(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -378,9 +386,7 @@ class PlaneSegmentationImageMask(VectorData): "linkml_meta": {"equals_string": "image_mask", "ifabsent": "string(image_mask)"} }, ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -407,9 +413,7 @@ class PlaneSegmentationPixelMask(VectorData): x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") weight: Optional[float] = Field(None, description="""Weight of the pixel.""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -437,9 +441,7 @@ class PlaneSegmentationVoxelMask(VectorData): y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") weight: Optional[float] = Field(None, description="""Weight of the voxel.""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -515,12 +517,14 @@ class ImagingPlaneManifold(ConfiguredBaseModel): }, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ @@ -543,8 +547,10 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel): "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for origin_coords. 
The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( None, @@ -574,8 +580,10 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel): "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for grid_spacing. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[NDArray[Shape["2 x_y, 3 x_y_z"], float]] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py index fc5368f..f7b47de 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py @@ -127,14 +127,12 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -159,14 +157,12 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -191,14 +187,12 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -223,14 +217,12 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -255,19 +247,17 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + focal_depth: float = Field(..., description="""Focal depth offset, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ @@ -289,11 +279,11 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -318,18 +308,16 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. 
This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py index 64796e9..99ff2d4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py @@ -130,10 +130,15 @@ class TimeSeries(NWBDataInterface): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) data: TimeSeriesData = Field( ..., @@ -178,15 +183,17 @@ class TimeSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) resolution: Optional[float] = Field( - None, + -1.0, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) value: Optional[ @@ -212,9 +219,13 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") - unit: Optional[str] = Field( - None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" + rate: float = Field(..., description="""Sampling rate, in Hz.""") + unit: Literal["seconds"] = Field( + "seconds", + description="""Unit of measurement for time, which is fixed to 'seconds'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"} + }, ) value: float = Field(...) @@ -260,9 +271,7 @@ class Images(NWBDataInterface): ) name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}}) - description: Optional[str] = Field( - None, description="""Description of this collection of images.""" - ) + description: str = Field(..., description="""Description of this collection of images.""") image: List[Image] = Field(..., description="""Images stored in this collection.""") diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py index 77b111a..da7c068 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py @@ -84,10 +84,15 @@ class SpatialSeries(TimeSeries): reference_frame: Optional[str] = Field( None, description="""Description defining what exactly 'straight-ahead' means.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -128,8 +133,9 @@ class SpatialSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py index 99b6083..4151cf7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py @@ -130,10 +130,15 @@ class ElectricalSeries(TimeSeries): description="""Channel-specific conversion factor. 
Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -198,10 +203,15 @@ class SpikeEventSeries(ElectricalSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -369,9 +379,9 @@ class ElectrodeGroup(NWBContainer): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") - location: Optional[str] = Field( - None, + description: str = Field(..., description="""Description of this electrode group.""") + location: str = Field( + ..., description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible.""", ) position: Optional[ElectrodeGroupPosition] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py index ad80c60..900bbeb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py @@ -150,13 +150,11 @@ class TimeIntervals(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -194,9 +192,7 @@ class TimeIntervalsTimeseries(VectorData): timeseries: Optional[TimeSeries] = Field( None, description="""the TimeSeries that this index applies to.""" ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py index a0344e1..1aab54d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py @@ -96,9 +96,7 @@ class ScratchData(NWBData): ) name: str = Field(...) - notes: Optional[str] = Field( - None, description="""Any notes the user has about the dataset being stored""" - ) + notes: str = Field(..., description="""Any notes the user has about the dataset being stored""") class NWBFile(NWBContainer): @@ -114,9 +112,10 @@ class NWBFile(NWBContainer): "root", json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}}, ) - nwb_version: Optional[str] = Field( - None, + nwb_version: Literal["2.2.5"] = Field( + "2.2.5", description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""", + json_schema_extra={"linkml_meta": {"equals_string": "2.2.5", "ifabsent": "string(2.2.5)"}}, ) file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., @@ -321,7 +320,7 @@ class GeneralSourceScript(ConfiguredBaseModel): "linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"} }, ) - file_name: Optional[str] = Field(None, description="""Name of script file.""") + file_name: str = Field(..., description="""Name of script file.""") value: str = Field(...) @@ -464,13 +463,11 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py index 0cdf6b1..b99d4ca 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py @@ -107,8 +107,8 @@ class PatchClampSeries(TimeSeries): ) name: str = Field(...) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -127,10 +127,15 @@ class PatchClampSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -170,8 +175,8 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) value: Optional[NDArray[Shape["* num_times"], float]] = Field( @@ -195,8 +200,8 @@ class CurrentClampSeries(PatchClampSeries): capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -214,10 +219,15 @@ class CurrentClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -257,9 +267,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -280,8 +291,8 @@ class IZeroClampSeries(CurrentClampSeries): ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) data: CurrentClampSeriesData = Field(..., description="""Recorded voltage.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -299,10 +310,15 @@ class IZeroClampSeries(CurrentClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -342,8 +358,8 @@ class CurrentClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -361,10 +377,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -404,9 +425,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -443,8 +467,8 @@ class VoltageClampSeries(PatchClampSeries): whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = ( Field(None, description="""Whole cell series resistance compensation, in ohms.""") ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -462,10 +486,15 @@ class VoltageClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -505,9 +534,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -528,9 +560,12 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) 
@@ -551,9 +586,12 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -574,9 +612,10 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["hertz"] = Field( + "hertz", description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", + json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}}, ) value: float = Field(...) @@ -597,9 +636,12 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -620,9 +662,12 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -643,9 +688,12 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -666,9 +714,10 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["ohms"] = Field( + "ohms", description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", + json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}}, ) value: float = Field(...) @@ -684,8 +733,8 @@ class VoltageClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -703,10 +752,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -746,9 +800,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -824,13 +879,11 @@ class SweepTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py index e6790fa..98d7ef4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py @@ -159,10 +159,15 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -204,8 +209,8 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[int] = Field( - None, + starting_frame: List[int] = Field( + ..., description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. 
The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) value: Optional[NDArray[Shape["* num_files"], str]] = Field( @@ -251,10 +256,15 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -322,10 +332,15 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -378,10 +393,15 @@ class IndexSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py index c3f845e..591e521 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py @@ -120,10 +120,15 @@ class AbstractFeatureSeries(TimeSeries): description="""Description of the features represented in TimeSeries::data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -164,8 +169,9 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "see 'feature_units'", description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}}, ) value: Optional[ Union[ @@ -190,10 +196,15 @@ class AnnotationSeries(TimeSeries): description="""Annotations made during an experiment.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -237,10 +248,15 @@ class IntervalSeries(TimeSeries): description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -296,10 +312,15 @@ class DecompositionSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -339,9 +360,10 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, @@ -403,13 +425,11 @@ class DecompositionSeriesBands(DynamicTable): description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -513,13 +533,11 @@ class Units(DynamicTable): NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -550,9 +568,7 @@ class UnitsSpikeTimes(VectorData): None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py index c30f06b..c419baf 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py @@ -91,10 +91,15 @@ class OptogeneticSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py index 6849817..3bb6356 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py @@ -153,10 +153,15 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. 
If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -210,10 +215,15 @@ class RoiResponseSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -347,13 +357,11 @@ class PlaneSegmentation(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -380,9 +388,7 @@ class PlaneSegmentationImageMask(VectorData): "linkml_meta": {"equals_string": "image_mask", "ifabsent": "string(image_mask)"} }, ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -409,9 +415,7 @@ class PlaneSegmentationPixelMask(VectorData): x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") weight: Optional[float] = Field(None, description="""Weight of the pixel.""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -439,9 +443,7 @@ class PlaneSegmentationVoxelMask(VectorData): y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") weight: Optional[float] = Field(None, description="""Weight of the voxel.""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -517,12 +519,14 @@ class ImagingPlaneManifold(ConfiguredBaseModel): }, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ @@ -545,8 +549,10 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel): "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for origin_coords. 
The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( Field(None) @@ -566,8 +572,10 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel): "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for grid_spacing. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py index 79dacf3..3b83056 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py @@ -127,14 +127,12 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -159,14 +157,12 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -191,14 +187,12 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -223,14 +217,12 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -255,19 +247,17 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + focal_depth: float = Field(..., description="""Focal depth offset, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ @@ -289,11 +279,11 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -318,18 +308,16 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. 
This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py index c3683bc..8ce2390 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py @@ -135,10 +135,15 @@ class TimeSeries(NWBDataInterface): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) data: TimeSeriesData = Field( ..., @@ -183,15 +188,17 @@ class TimeSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) resolution: Optional[float] = Field( - None, + -1.0, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) continuity: Optional[str] = Field( @@ -221,9 +228,13 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") - unit: Optional[str] = Field( - None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" + rate: float = Field(..., description="""Sampling rate, in Hz.""") + unit: Literal["seconds"] = Field( + "seconds", + description="""Unit of measurement for time, which is fixed to 'seconds'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"} + }, ) value: float = Field(...) @@ -269,9 +280,7 @@ class Images(NWBDataInterface): ) name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}}) - description: Optional[str] = Field( - None, description="""Description of this collection of images.""" - ) + description: str = Field(..., description="""Description of this collection of images.""") image: List[Image] = Field(..., description="""Images stored in this collection.""") diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py index a469b16..9c8a529 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py @@ -84,10 +84,15 @@ class SpatialSeries(TimeSeries): reference_frame: Optional[str] = Field( None, description="""Description defining what exactly 'straight-ahead' means.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -128,8 +133,9 @@ class SpatialSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py index 19de749..c3b57c3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py @@ -134,10 +134,15 @@ class ElectricalSeries(TimeSeries): description="""Channel-specific conversion factor. 
Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -206,10 +211,15 @@ class SpikeEventSeries(ElectricalSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -377,9 +387,9 @@ class ElectrodeGroup(NWBContainer): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") - location: Optional[str] = Field( - None, + description: str = Field(..., description="""Description of this electrode group.""") + location: str = Field( + ..., description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible.""", ) position: Optional[ElectrodeGroupPosition] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py index adfa639..d82e78d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py @@ -150,13 +150,11 @@ class TimeIntervals(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -191,9 +189,7 @@ class TimeIntervalsTimeseries(VectorData): timeseries: Optional[TimeSeries] = Field( None, description="""the TimeSeries that this index applies to.""" ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py index 03e0af6..987b15e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py @@ -96,9 +96,7 @@ class ScratchData(NWBData): ) name: str = Field(...) - notes: Optional[str] = Field( - None, description="""Any notes the user has about the dataset being stored""" - ) + notes: str = Field(..., description="""Any notes the user has about the dataset being stored""") class NWBFile(NWBContainer): @@ -114,9 +112,10 @@ class NWBFile(NWBContainer): "root", json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}}, ) - nwb_version: Optional[str] = Field( - None, + nwb_version: Literal["2.3.0"] = Field( + "2.3.0", description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""", + json_schema_extra={"linkml_meta": {"equals_string": "2.3.0", "ifabsent": "string(2.3.0)"}}, ) file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., @@ -321,7 +320,7 @@ class GeneralSourceScript(ConfiguredBaseModel): "linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"} }, ) - file_name: Optional[str] = Field(None, description="""Name of script file.""") + file_name: str = Field(..., description="""Name of script file.""") value: str = Field(...) @@ -464,13 +463,11 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py index 2c8abe2..6cb3a16 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py @@ -107,8 +107,8 @@ class PatchClampSeries(TimeSeries): ) name: str = Field(...) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -127,10 +127,15 @@ class PatchClampSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -170,8 +175,8 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) value: Optional[NDArray[Shape["* num_times"], float]] = Field( @@ -195,8 +200,8 @@ class CurrentClampSeries(PatchClampSeries): capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -214,10 +219,15 @@ class CurrentClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -257,9 +267,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -274,9 +285,10 @@ class IZeroClampSeries(CurrentClampSeries): ) name: str = Field(...) - stimulus_description: Optional[str] = Field( - None, + stimulus_description: Literal["N/A"] = Field( + "N/A", description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""", + json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}}, ) bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") @@ -300,10 +312,15 @@ class IZeroClampSeries(CurrentClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -343,8 +360,8 @@ class CurrentClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -362,10 +379,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -405,9 +427,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -444,8 +469,8 @@ class VoltageClampSeries(PatchClampSeries): whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = ( Field(None, description="""Whole cell series resistance compensation, in ohms.""") ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -463,10 +488,15 @@ class VoltageClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -506,9 +536,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -529,9 +562,12 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) 
@@ -552,9 +588,12 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -575,9 +614,10 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["hertz"] = Field( + "hertz", description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", + json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}}, ) value: float = Field(...) @@ -598,9 +638,12 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -621,9 +664,12 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -644,9 +690,12 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -667,9 +716,10 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["ohms"] = Field( + "ohms", description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", + json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}}, ) value: float = Field(...) @@ -685,8 +735,8 @@ class VoltageClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -704,10 +754,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -747,9 +802,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -825,13 +881,11 @@ class SweepTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py index fbe685b..8425dcc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py @@ -169,10 +169,15 @@ class ImageSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -214,8 +219,8 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[int] = Field( - None, + starting_frame: List[int] = Field( + ..., description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). 
For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) value: Optional[NDArray[Shape["* num_files"], str]] = Field( @@ -270,10 +275,15 @@ class ImageMaskSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -350,10 +360,15 @@ class OpticalSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -406,10 +421,15 @@ class IndexSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py index cbc95a2..c7336be 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py @@ -120,10 +120,15 @@ class AbstractFeatureSeries(TimeSeries): description="""Description of the features represented in TimeSeries::data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -164,8 +169,9 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "see 'feature_units'", description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}}, ) value: Optional[ Union[ @@ -190,10 +196,15 @@ class AnnotationSeries(TimeSeries): description="""Annotations made during an experiment.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -237,10 +248,15 @@ class IntervalSeries(TimeSeries): description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -308,10 +324,15 @@ class DecompositionSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -351,9 +372,10 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, @@ -415,13 +437,11 @@ class DecompositionSeriesBands(DynamicTable): description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -555,13 +575,11 @@ class Units(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -589,9 +607,7 @@ class UnitsSpikeTimes(VectorData): None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py index aa317fb..56f099a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py @@ -91,10 +91,15 @@ class OptogeneticSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py index 48ffbff..1aab4fb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py @@ -162,10 +162,15 @@ class TwoPhotonSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -219,10 +224,15 @@ class RoiResponseSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -356,13 +366,11 @@ class PlaneSegmentation(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -386,9 +394,7 @@ class PlaneSegmentationImageMask(VectorData): "linkml_meta": {"equals_string": "image_mask", "ifabsent": "string(image_mask)"} }, ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -415,9 +421,7 @@ class PlaneSegmentationPixelMask(VectorData): x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") weight: Optional[float] = Field(None, description="""Weight of the pixel.""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -445,9 +449,7 @@ class PlaneSegmentationVoxelMask(VectorData): y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") weight: Optional[float] = Field(None, description="""Weight of the voxel.""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -523,12 +525,14 @@ class ImagingPlaneManifold(ConfiguredBaseModel): }, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ @@ -551,8 +555,10 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel): "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for origin_coords. 
The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( Field(None) @@ -572,8 +578,10 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel): "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for grid_spacing. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py index 3f2c3c9..26e6c7d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py @@ -127,14 +127,12 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -159,14 +157,12 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -191,14 +187,12 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -223,14 +217,12 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -255,19 +247,17 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + focal_depth: float = Field(..., description="""Focal depth offset, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ @@ -289,11 +279,11 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -318,18 +308,16 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. 
This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py index 38da923..c7e83cd 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py @@ -99,9 +99,7 @@ class TimeSeriesReferenceVectorData(VectorData): description="""Number of data samples available in this time series, during this epoch""", ) timeseries: TimeSeries = Field(..., description="""The TimeSeries that this index applies to""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -169,10 +167,15 @@ class TimeSeries(NWBDataInterface): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) data: TimeSeriesData = Field( ..., @@ -217,15 +220,17 @@ class TimeSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) resolution: Optional[float] = Field( - None, + -1.0, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) continuity: Optional[str] = Field( @@ -255,9 +260,13 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") - unit: Optional[str] = Field( - None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" + rate: float = Field(..., description="""Sampling rate, in Hz.""") + unit: Literal["seconds"] = Field( + "seconds", + description="""Unit of measurement for time, which is fixed to 'seconds'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"} + }, ) value: float = Field(...) @@ -303,9 +312,7 @@ class Images(NWBDataInterface): ) name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}}) - description: Optional[str] = Field( - None, description="""Description of this collection of images.""" - ) + description: str = Field(..., description="""Description of this collection of images.""") image: List[Image] = Field(..., description="""Images stored in this collection.""") diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py index 2d75ba0..7251865 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py @@ -84,10 +84,15 @@ class SpatialSeries(TimeSeries): reference_frame: Optional[str] = Field( None, description="""Description defining what exactly 'straight-ahead' means.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -128,8 +133,9 @@ class SpatialSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py index 7099d5a..8130e81 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py @@ -134,10 +134,15 @@ class ElectricalSeries(TimeSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -206,10 +211,15 @@ class SpikeEventSeries(ElectricalSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -377,9 +387,9 @@ class ElectrodeGroup(NWBContainer): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") - location: Optional[str] = Field( - None, + description: str = Field(..., description="""Description of this electrode group.""") + location: str = Field( + ..., description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""", ) position: Optional[ElectrodeGroupPosition] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py index 3a0c3fb..8828e93 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py @@ -150,13 +150,11 @@ class TimeIntervals(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -191,9 +189,7 @@ class TimeIntervalsTimeseries(VectorData): timeseries: Optional[TimeSeries] = Field( None, description="""the TimeSeries that this index applies to.""" ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py index de70d34..b74e8b8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py @@ -104,9 +104,7 @@ class ScratchData(NWBData): ) name: str = Field(...) 
- notes: Optional[str] = Field( - None, description="""Any notes the user has about the dataset being stored""" - ) + notes: str = Field(..., description="""Any notes the user has about the dataset being stored""") class NWBFile(NWBContainer): @@ -122,9 +120,10 @@ class NWBFile(NWBContainer): "root", json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}}, ) - nwb_version: Optional[str] = Field( - None, + nwb_version: Literal["2.4.0"] = Field( + "2.4.0", description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""", + json_schema_extra={"linkml_meta": {"equals_string": "2.4.0", "ifabsent": "string(2.4.0)"}}, ) file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., @@ -329,7 +328,7 @@ class GeneralSourceScript(ConfiguredBaseModel): "linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"} }, ) - file_name: Optional[str] = Field(None, description="""Name of script file.""") + file_name: str = Field(..., description="""Name of script file.""") value: str = Field(...) @@ -472,13 +471,11 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py index 8d6b370..d21047d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py @@ -114,8 +114,8 @@ class PatchClampSeries(TimeSeries): ) name: str = Field(...) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -134,10 +134,15 @@ class PatchClampSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -177,8 +182,8 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) value: Optional[NDArray[Shape["* num_times"], float]] = Field( @@ -202,8 +207,8 @@ class CurrentClampSeries(PatchClampSeries): capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -221,10 +226,15 @@ class CurrentClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -264,9 +274,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -281,9 +292,10 @@ class IZeroClampSeries(CurrentClampSeries): ) name: str = Field(...) 
- stimulus_description: Optional[str] = Field( - None, + stimulus_description: Literal["N/A"] = Field( + "N/A", description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""", + json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}}, ) bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") @@ -307,10 +319,15 @@ class IZeroClampSeries(CurrentClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -350,8 +367,8 @@ class CurrentClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -369,10 +386,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -412,9 +434,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) 
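A second recurring change in these hunks: fields the schema requires but that were previously modeled as `Optional[str] = Field(None, ...)`, such as `stimulus_description`, become required (`Field(...)`), while fields with an `ifabsent` default, such as `comments`, stay `Optional` but gain the concrete default. A hedged sketch of the resulting behavior (`SeriesDemo` is hypothetical, not a generated model):

```python
# Sketch of the Optional -> required migration; `SeriesDemo` is hypothetical.
from typing import Optional

from pydantic import BaseModel, Field, ValidationError


class SeriesDemo(BaseModel):
    stimulus_description: str = Field(...)  # required: the schema has no default
    comments: Optional[str] = Field("no comments")  # ifabsent-style default


print(SeriesDemo(stimulus_description="ramp").comments)  # "no comments"
try:
    SeriesDemo()  # omitting a required field now fails fast at construction
except ValidationError as err:
    print([e["loc"] for e in err.errors()])  # [('stimulus_description',)]
```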
@@ -451,8 +476,8 @@ class VoltageClampSeries(PatchClampSeries): whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = ( Field(None, description="""Whole cell series resistance compensation, in ohms.""") ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -470,10 +495,15 @@ class VoltageClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -513,9 +543,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -536,9 +569,12 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -559,9 +595,12 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -582,9 +621,10 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["hertz"] = Field( + "hertz", description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", + json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}}, ) value: float = Field(...) 
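Because the `linkml_meta` mappings travel in `json_schema_extra`, they surface verbatim in each model's JSON Schema, so downstream tooling can recover the `equals_string`/`ifabsent` constraints without parsing the LinkML source. A small sketch assuming pydantic v2's `model_json_schema` (the `HertzDemo` name is invented):

```python
# Sketch: linkml_meta carried in json_schema_extra round-trips into the JSON
# Schema emitted by pydantic v2. `HertzDemo` is an invented example class.
from typing import Literal

from pydantic import BaseModel, Field


class HertzDemo(BaseModel):
    unit: Literal["hertz"] = Field(
        "hertz",
        json_schema_extra={
            "linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}
        },
    )


schema = HertzDemo.model_json_schema()
print(schema["properties"]["unit"]["linkml_meta"])
# {'equals_string': 'hertz', 'ifabsent': 'string(hertz)'}
```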
@@ -605,9 +645,12 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -628,9 +671,12 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -651,9 +697,12 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -674,9 +723,10 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["ohms"] = Field( + "ohms", description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", + json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}}, ) value: float = Field(...) @@ -692,8 +742,8 @@ class VoltageClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -711,10 +761,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -754,9 +809,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -832,13 +888,11 @@ class SweepTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -859,14 +913,21 @@ class IntracellularElectrodesTable(DynamicTable): ) name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" + description: Literal["Table for storing intracellular electrode related metadata."] = Field( + "Table for storing intracellular electrode related metadata.", + description="""Description of what is in this dynamic table.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": "Table for storing intracellular electrode related metadata.", + "ifabsent": "string(Table for storing intracellular electrode related metadata.)", + } + }, ) electrode: List[IntracellularElectrode] = Field( ..., description="""Column for storing the reference to the intracellular electrode.""" ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -889,8 +950,15 @@ class IntracellularStimuliTable(DynamicTable): ) name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" + description: Literal["Table for storing intracellular stimulus related metadata."] = Field( + "Table for storing intracellular stimulus related metadata.", + description="""Description of what is in this dynamic table.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": "Table for storing intracellular stimulus related metadata.", + "ifabsent": "string(Table for storing intracellular stimulus related metadata.)", + } + }, ) stimulus: Named[TimeSeriesReferenceVectorData] = Field( ..., @@ -904,8 +972,8 @@ class IntracellularStimuliTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -928,8 +996,15 @@ class IntracellularResponsesTable(DynamicTable): ) name: str = Field(...) 
- description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" + description: Literal["Table for storing intracellular response related metadata."] = Field( + "Table for storing intracellular response related metadata.", + description="""Description of what is in this dynamic table.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": "Table for storing intracellular response related metadata.", + "ifabsent": "string(Table for storing intracellular response related metadata.)", + } + }, ) response: Named[TimeSeriesReferenceVectorData] = Field( ..., @@ -943,8 +1018,8 @@ class IntracellularResponsesTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -975,9 +1050,27 @@ class IntracellularRecordingsTable(AlignedDynamicTable): } }, ) - description: Optional[str] = Field( - None, + description: Literal[ + "A table to group together a stimulus and response from a single electrode and a single" + " simultaneous recording and for storing metadata about the intracellular recording." + ] = Field( + "A table to group together a stimulus and response from a single electrode and a single" + " simultaneous recording and for storing metadata about the intracellular recording.", description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": ( + "A table to group together a stimulus and response from a " + "single electrode and a single simultaneous recording and " + "for storing metadata about the intracellular recording." + ), + "ifabsent": ( + "string(A table to group together a stimulus and response from a " + "single electrode and a single simultaneous recording and for " + "storing metadata about the intracellular recording.)" + ), + } + }, ) electrodes: IntracellularElectrodesTable = Field( ..., description="""Table for storing intracellular electrode related metadata.""" @@ -991,8 +1084,8 @@ class IntracellularRecordingsTable(AlignedDynamicTable): value: Optional[List[DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -1039,13 +1132,11 @@ class SimultaneousRecordingsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1069,12 +1160,12 @@ class SimultaneousRecordingsTableRecordings(DynamicTableRegion): "linkml_meta": {"equals_string": "recordings", "ifabsent": "string(recordings)"} }, ) - table: Optional[IntracellularRecordingsTable] = Field( - None, + table: IntracellularRecordingsTable = Field( + ..., description="""Reference to the IntracellularRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ @@ -1129,13 +1220,11 @@ class SequentialRecordingsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1162,12 +1251,12 @@ class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion): } }, ) - table: Optional[SimultaneousRecordingsTable] = Field( - None, + table: SimultaneousRecordingsTable = Field( + ..., description="""Reference to the SimultaneousRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ @@ -1210,13 +1299,11 @@ class RepetitionsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1243,12 +1330,12 @@ class RepetitionsTableSequentialRecordings(DynamicTableRegion): } }, ) - table: Optional[SequentialRecordingsTable] = Field( - None, + table: SequentialRecordingsTable = Field( + ..., description="""Reference to the SequentialRecordingsTable table that this table region applies to. 
This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
     )
-    description: Optional[str] = Field(
-        None, description="""Description of what this table region points to."""
+    description: str = Field(
+        ..., description="""Description of what this table region points to."""
     )
     value: Optional[
         Union[
@@ -1293,13 +1380,11 @@ class ExperimentalConditionsTable(DynamicTable):
             }
         },
     )
-    colnames: Optional[str] = Field(
-        None,
+    colnames: List[str] = Field(
+        ...,
         description="""The names of the columns in this table. This should be used to specify an order to the columns.""",
     )
-    description: Optional[str] = Field(
-        None, description="""Description of what is in this dynamic table."""
-    )
+    description: str = Field(..., description="""Description of what is in this dynamic table.""")
     id: NDArray[Shape["* num_rows"], int] = Field(
         ...,
         description="""Array of unique identifiers for the rows of this dynamic table.""",
@@ -1323,12 +1408,12 @@ class ExperimentalConditionsTableRepetitions(DynamicTableRegion):
             "linkml_meta": {"equals_string": "repetitions", "ifabsent": "string(repetitions)"}
         },
     )
-    table: Optional[RepetitionsTable] = Field(
-        None,
+    table: RepetitionsTable = Field(
+        ...,
         description="""Reference to the RepetitionsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""",
     )
-    description: Optional[str] = Field(
-        None, description="""Description of what this table region points to."""
+    description: str = Field(
+        ..., description="""Description of what this table region points to."""
     )
     value: Optional[
         Union[
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py
index 249545b..631f809 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py
@@ -169,10 +169,15 @@ class ImageSeries(TimeSeries):
             }
         },
     )
-    description: Optional[str] = Field(None, description="""Description of the time series.""")
+    description: Optional[str] = Field(
+        "no description",
+        description="""Description of the time series.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+    )
     comments: Optional[str] = Field(
-        None,
+        "no comments",
         description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
     )
     starting_time: Optional[TimeSeriesStartingTime] = Field(
         None,
@@ -214,8 +219,8 @@ class ImageSeriesExternalFile(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"}
         },
     )
-    starting_frame: Optional[int] = Field(
-        None,
+    starting_frame: List[int] = Field(
+        ...,
         description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset.
Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) value: Optional[NDArray[Shape["* num_files"], str]] = Field( @@ -270,10 +275,15 @@ class ImageMaskSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -350,10 +360,15 @@ class OpticalSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -406,10 +421,15 @@ class IndexSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py index 804a7c2..b9919ef 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py @@ -120,10 +120,15 @@ class AbstractFeatureSeries(TimeSeries): description="""Description of the features represented in TimeSeries::data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -164,8 +169,9 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "see 'feature_units'", description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}}, ) value: Optional[ Union[ @@ -190,10 +196,15 @@ class AnnotationSeries(TimeSeries): description="""Annotations made during an experiment.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -237,10 +248,15 @@ class IntervalSeries(TimeSeries): description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -308,10 +324,15 @@ class DecompositionSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -351,9 +372,10 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, @@ -415,13 +437,11 @@ class DecompositionSeriesBands(DynamicTable): description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -555,13 +575,11 @@ class Units(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -589,9 +607,7 @@ class UnitsSpikeTimes(VectorData): None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py index 1720289..da627b1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py @@ -91,10 +91,15 @@ class OptogeneticSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py index 9fabdb8..47b323c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py @@ -162,10 +162,15 @@ class TwoPhotonSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -219,10 +224,15 @@ class RoiResponseSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -356,13 +366,11 @@ class PlaneSegmentation(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -386,9 +394,7 @@ class PlaneSegmentationImageMask(VectorData): "linkml_meta": {"equals_string": "image_mask", "ifabsent": "string(image_mask)"} }, ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -415,9 +421,7 @@ class PlaneSegmentationPixelMask(VectorData): x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") weight: Optional[float] = Field(None, description="""Weight of the pixel.""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -445,9 +449,7 @@ class PlaneSegmentationVoxelMask(VectorData): y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") weight: Optional[float] = Field(None, description="""Weight of the voxel.""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -523,12 +525,14 @@ class ImagingPlaneManifold(ConfiguredBaseModel): }, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ @@ -551,8 +555,10 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel): "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for origin_coords. 
The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( Field(None) @@ -572,8 +578,10 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel): "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for grid_spacing. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py index f75b79b..825df96 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py @@ -127,14 +127,12 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -159,14 +157,12 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -191,14 +187,12 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -223,14 +217,12 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -255,19 +247,17 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + focal_depth: float = Field(..., description="""Focal depth offset, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ @@ -289,11 +279,11 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -318,18 +308,16 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. 
This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py index 0f2fc41..ab1c061 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py @@ -123,9 +123,7 @@ class TimeSeriesReferenceVectorData(VectorData): description="""Number of data samples available in this time series, during this epoch""", ) timeseries: TimeSeries = Field(..., description="""The TimeSeries that this index applies to""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -214,10 +212,15 @@ class TimeSeries(NWBDataInterface): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) data: TimeSeriesData = Field( ..., @@ -262,19 +265,21 @@ class TimeSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) offset: Optional[float] = Field( None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", ) resolution: Optional[float] = Field( - None, + -1.0, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) continuity: Optional[str] = Field( @@ -304,9 +309,13 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") - unit: Optional[str] = Field( - None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" + rate: float = Field(..., description="""Sampling rate, in Hz.""") + unit: Literal["seconds"] = Field( + "seconds", + description="""Unit of measurement for time, which is fixed to 'seconds'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"} + }, ) value: float = Field(...) @@ -352,9 +361,7 @@ class Images(NWBDataInterface): ) name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}}) - description: Optional[str] = Field( - None, description="""Description of this collection of images.""" - ) + description: str = Field(..., description="""Description of this collection of images.""") image: List[Image] = Field(..., description="""Images stored in this collection.""") order_of_images: Named[Optional[ImageReferences]] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py index ffe0f14..c295739 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py @@ -84,10 +84,15 @@ class SpatialSeries(TimeSeries): reference_frame: Optional[str] = Field( None, description="""Description defining what exactly 'straight-ahead' means.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -128,8 +133,9 @@ class SpatialSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py index 997828f..74c571f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py @@ -134,10 +134,15 @@ class ElectricalSeries(TimeSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -206,10 +211,15 @@ class SpikeEventSeries(ElectricalSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -377,9 +387,9 @@ class ElectrodeGroup(NWBContainer): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") - location: Optional[str] = Field( - None, + description: str = Field(..., description="""Description of this electrode group.""") + location: str = Field( + ..., description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""", ) position: Optional[ElectrodeGroupPosition] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py index c399761..9181482 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py @@ -159,13 +159,11 @@ class TimeIntervals(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py index 541ba5e..dc3dd77 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py @@ -105,9 +105,7 @@ class ScratchData(NWBData): ) name: str = Field(...) - notes: Optional[str] = Field( - None, description="""Any notes the user has about the dataset being stored""" - ) + notes: str = Field(..., description="""Any notes the user has about the dataset being stored""") class NWBFile(NWBContainer): @@ -123,9 +121,10 @@ class NWBFile(NWBContainer): "root", json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}}, ) - nwb_version: Optional[str] = Field( - None, + nwb_version: Literal["2.5.0"] = Field( + "2.5.0", description="""File version string. Use semantic versioning, e.g. 1.2.1. 
This will be the name of the format with trailing major, minor and patch numbers.""", + json_schema_extra={"linkml_meta": {"equals_string": "2.5.0", "ifabsent": "string(2.5.0)"}}, ) file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., @@ -332,7 +331,7 @@ class GeneralSourceScript(ConfiguredBaseModel): "linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"} }, ) - file_name: Optional[str] = Field(None, description="""Name of script file.""") + file_name: str = Field(..., description="""Name of script file.""") value: str = Field(...) @@ -475,13 +474,11 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py index b1c9fce..e5ab584 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py @@ -114,8 +114,8 @@ class PatchClampSeries(TimeSeries): ) name: str = Field(...) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -134,10 +134,15 @@ class PatchClampSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -177,8 +182,8 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
     )
     value: Optional[NDArray[Shape["* num_times"], float]] = Field(
         None,
@@ -202,8 +207,8 @@ class CurrentClampSeries(PatchClampSeries):
     capacitance_compensation: Optional[float] = Field(
         None, description="""Capacitance compensation, in farads."""
     )
-    stimulus_description: Optional[str] = Field(
-        None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+    stimulus_description: str = Field(
+        ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
     )
     sweep_number: Optional[int] = Field(
         None, description="""Sweep number, allows to group different PatchClampSeries together."""
@@ -221,10 +226,15 @@ class CurrentClampSeries(PatchClampSeries):
             }
         },
     )
-    description: Optional[str] = Field(None, description="""Description of the time series.""")
+    description: Optional[str] = Field(
+        "no description",
+        description="""Description of the time series.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+    )
     comments: Optional[str] = Field(
-        None,
+        "no comments",
         description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
    )
     starting_time: Optional[TimeSeriesStartingTime] = Field(
         None,
@@ -264,9 +274,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel):
         "data",
         json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
     )
-    unit: Optional[str] = Field(
-        None,
+    unit: Literal["volts"] = Field(
+        "volts",
         description="""Base unit of measurement for working with the data, which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+        json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}},
     )
     value: Any = Field(...)

@@ -281,9 +292,10 @@ class IZeroClampSeries(CurrentClampSeries):
     )
     name: str = Field(...)
-    stimulus_description: Optional[str] = Field(
-        None,
+    stimulus_description: Literal["N/A"] = Field(
+        "N/A",
         description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""",
+        json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}},
     )
     bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""")
     bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""")
@@ -307,10 +319,15 @@ class IZeroClampSeries(CurrentClampSeries):
             }
         },
     )
-    description: Optional[str] = Field(None, description="""Description of the time series.""")
+    description: Optional[str] = Field(
+        "no description",
+        description="""Description of the time series.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+    )
     comments: Optional[str] = Field(
-        None,
+        "no comments",
         description="""Human-readable comments about the TimeSeries.
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
     )
     starting_time: Optional[TimeSeriesStartingTime] = Field(
         None,
@@ -350,8 +367,8 @@ class CurrentClampStimulusSeries(PatchClampSeries):
     name: str = Field(...)
     data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""")
-    stimulus_description: Optional[str] = Field(
-        None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+    stimulus_description: str = Field(
+        ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
     )
     sweep_number: Optional[int] = Field(
         None, description="""Sweep number, allows to group different PatchClampSeries together."""
@@ -369,10 +386,15 @@ class CurrentClampStimulusSeries(PatchClampSeries):
             }
         },
     )
-    description: Optional[str] = Field(None, description="""Description of the time series.""")
+    description: Optional[str] = Field(
+        "no description",
+        description="""Description of the time series.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+    )
     comments: Optional[str] = Field(
-        None,
+        "no comments",
         description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}},
     )
     starting_time: Optional[TimeSeriesStartingTime] = Field(
         None,
@@ -412,9 +434,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel):
         "data",
         json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
     )
-    unit: Optional[str] = Field(
-        None,
+    unit: Literal["amperes"] = Field(
+        "amperes",
         description="""Base unit of measurement for working with the data, which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+        json_schema_extra={
+            "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"}
+        },
     )
     value: Any = Field(...)

@@ -451,8 +476,8 @@ class VoltageClampSeries(PatchClampSeries):
     whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = (
         Field(None, description="""Whole cell series resistance compensation, in ohms.""")
     )
-    stimulus_description: Optional[str] = Field(
-        None, description="""Protocol/stimulus name for this patch-clamp dataset."""
+    stimulus_description: str = Field(
+        ..., description="""Protocol/stimulus name for this patch-clamp dataset."""
     )
     sweep_number: Optional[int] = Field(
         None, description="""Sweep number, allows to group different PatchClampSeries together."""
@@ -470,10 +495,15 @@ class VoltageClampSeries(PatchClampSeries):
             }
         },
     )
-    description: Optional[str] = Field(None, description="""Description of the time series.""")
+    description: Optional[str] = Field(
+        "no description",
+        description="""Description of the time series.""",
+        json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}},
+    )
     comments: Optional[str] = Field(
-        None,
+        "no comments",
         description="""Human-readable comments about the TimeSeries.
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -513,9 +543,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -536,9 +569,12 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -559,9 +595,12 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -582,9 +621,10 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["hertz"] = Field( + "hertz", description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", + json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}}, ) value: float = Field(...) @@ -605,9 +645,12 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -628,9 +671,12 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -651,9 +697,12 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) 
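
The hunks above all apply the same transformation: attributes the NWB schema fixes to a single value (here the various `unit` fields) move from `Optional[str] = None` to a `Literal` type whose only member is also the default, with the LinkML `equals_string`/`ifabsent` metadata carried along in `json_schema_extra`. A minimal sketch of the resulting behavior, assuming pydantic v2; `CapacitanceComp` is a hypothetical stand-in for a generated class such as `VoltageClampSeriesWholeCellCapacitanceComp`:

```python
# Sketch (assuming pydantic v2): a schema-fixed attribute becomes a Literal
# with the fixed value as its default, so the constraint is enforced at
# validation time instead of being silently dropped as Optional[str].
from typing import Literal

from pydantic import BaseModel, Field, ValidationError


class CapacitanceComp(BaseModel):
    """Hypothetical stand-in for a generated fixed-unit container class."""

    unit: Literal["farads"] = Field(
        "farads",
        description="Unit of measurement, fixed to 'farads'.",
        json_schema_extra={
            "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
        },
    )
    value: float


comp = CapacitanceComp(value=1.2e-12)
assert comp.unit == "farads"  # default is supplied when the field is omitted

try:
    CapacitanceComp(unit="ohms", value=1.0)  # any other value is rejected
except ValidationError as e:
    print(e.errors()[0]["type"])  # literal_error
```
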
@@ -674,9 +723,10 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["ohms"] = Field( + "ohms", description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", + json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}}, ) value: float = Field(...) @@ -692,8 +742,8 @@ class VoltageClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -711,10 +761,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -754,9 +809,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -833,13 +889,11 @@ class SweepTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -860,14 +914,21 @@ class IntracellularElectrodesTable(DynamicTable): ) name: str = Field(...) 
- description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" + description: Literal["Table for storing intracellular electrode related metadata."] = Field( + "Table for storing intracellular electrode related metadata.", + description="""Description of what is in this dynamic table.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": "Table for storing intracellular electrode related metadata.", + "ifabsent": "string(Table for storing intracellular electrode related metadata.)", + } + }, ) electrode: List[IntracellularElectrode] = Field( ..., description="""Column for storing the reference to the intracellular electrode.""" ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -890,8 +951,15 @@ class IntracellularStimuliTable(DynamicTable): ) name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" + description: Literal["Table for storing intracellular stimulus related metadata."] = Field( + "Table for storing intracellular stimulus related metadata.", + description="""Description of what is in this dynamic table.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": "Table for storing intracellular stimulus related metadata.", + "ifabsent": "string(Table for storing intracellular stimulus related metadata.)", + } + }, ) stimulus: Named[TimeSeriesReferenceVectorData] = Field( ..., @@ -905,8 +973,8 @@ class IntracellularStimuliTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -929,8 +997,15 @@ class IntracellularResponsesTable(DynamicTable): ) name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" + description: Literal["Table for storing intracellular response related metadata."] = Field( + "Table for storing intracellular response related metadata.", + description="""Description of what is in this dynamic table.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": "Table for storing intracellular response related metadata.", + "ifabsent": "string(Table for storing intracellular response related metadata.)", + } + }, ) response: Named[TimeSeriesReferenceVectorData] = Field( ..., @@ -944,8 +1019,8 @@ class IntracellularResponsesTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -976,9 +1051,27 @@ class IntracellularRecordingsTable(AlignedDynamicTable): } }, ) - description: Optional[str] = Field( - None, + description: Literal[ + "A table to group together a stimulus and response from a single electrode and a single" + " simultaneous recording and for storing metadata about the intracellular recording." 
+ ] = Field( + "A table to group together a stimulus and response from a single electrode and a single" + " simultaneous recording and for storing metadata about the intracellular recording.", description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": ( + "A table to group together a stimulus and response from a " + "single electrode and a single simultaneous recording and " + "for storing metadata about the intracellular recording." + ), + "ifabsent": ( + "string(A table to group together a stimulus and response from a " + "single electrode and a single simultaneous recording and for " + "storing metadata about the intracellular recording.)" + ), + } + }, ) electrodes: IntracellularElectrodesTable = Field( ..., description="""Table for storing intracellular electrode related metadata.""" @@ -992,8 +1085,8 @@ class IntracellularRecordingsTable(AlignedDynamicTable): value: Optional[List[DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -1040,13 +1133,11 @@ class SimultaneousRecordingsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1070,12 +1161,12 @@ class SimultaneousRecordingsTableRecordings(DynamicTableRegion): "linkml_meta": {"equals_string": "recordings", "ifabsent": "string(recordings)"} }, ) - table: Optional[IntracellularRecordingsTable] = Field( - None, + table: IntracellularRecordingsTable = Field( + ..., description="""Reference to the IntracellularRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ @@ -1130,13 +1221,11 @@ class SequentialRecordingsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1163,12 +1252,12 @@ class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion): } }, ) - table: Optional[SimultaneousRecordingsTable] = Field( - None, + table: SimultaneousRecordingsTable = Field( + ..., description="""Reference to the SimultaneousRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ @@ -1211,13 +1300,11 @@ class RepetitionsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1244,12 +1331,12 @@ class RepetitionsTableSequentialRecordings(DynamicTableRegion): } }, ) - table: Optional[SequentialRecordingsTable] = Field( - None, + table: SequentialRecordingsTable = Field( + ..., description="""Reference to the SequentialRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ @@ -1294,13 +1381,11 @@ class ExperimentalConditionsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1324,12 +1409,12 @@ class ExperimentalConditionsTableRepetitions(DynamicTableRegion): "linkml_meta": {"equals_string": "repetitions", "ifabsent": "string(repetitions)"} }, ) - table: Optional[RepetitionsTable] = Field( - None, + table: RepetitionsTable = Field( + ..., description="""Reference to the RepetitionsTable table that this table region applies to. 
This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py index 91baa83..d92729b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py @@ -175,10 +175,15 @@ class ImageSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -220,8 +225,8 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[int] = Field( - None, + starting_frame: List[int] = Field( + ..., description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) value: Optional[NDArray[Shape["* num_files"], str]] = Field( @@ -276,10 +281,15 @@ class ImageMaskSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -356,10 +366,15 @@ class OpticalSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -421,10 +436,15 @@ class IndexSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py index 2ac3d3f..8a8baaf 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py @@ -120,10 +120,15 @@ class AbstractFeatureSeries(TimeSeries): description="""Description of the features represented in TimeSeries::data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -164,8 +169,9 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "see 'feature_units'", description="""Since there can be different units for different features, store the units in 'feature_units'. 
The default value for this attribute is \"see 'feature_units'\".""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}}, ) value: Optional[ Union[ @@ -190,10 +196,15 @@ class AnnotationSeries(TimeSeries): description="""Annotations made during an experiment.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -237,10 +248,15 @@ class IntervalSeries(TimeSeries): description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -308,10 +324,15 @@ class DecompositionSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -351,9 +372,10 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, @@ -415,13 +437,11 @@ class DecompositionSeriesBands(DynamicTable): description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -555,13 +575,11 @@ class Units(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -589,9 +607,7 @@ class UnitsSpikeTimes(VectorData): None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py index 9ffaa75..0cfb470 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py @@ -91,10 +91,15 @@ class OptogeneticSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py index 01c7c0d..a07c17a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py @@ -162,10 +162,15 @@ class TwoPhotonSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -219,10 +224,15 @@ class RoiResponseSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -356,13 +366,11 @@ class PlaneSegmentation(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -386,9 +394,7 @@ class PlaneSegmentationImageMask(VectorData): "linkml_meta": {"equals_string": "image_mask", "ifabsent": "string(image_mask)"} }, ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -415,9 +421,7 @@ class PlaneSegmentationPixelMask(VectorData): x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") weight: Optional[float] = Field(None, description="""Weight of the pixel.""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -445,9 +449,7 @@ class PlaneSegmentationVoxelMask(VectorData): y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") weight: Optional[float] = Field(None, description="""Weight of the voxel.""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -523,12 +525,14 @@ class ImagingPlaneManifold(ConfiguredBaseModel): }, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ @@ -551,8 +555,10 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel): "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for origin_coords. 
The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( Field(None) @@ -572,8 +578,10 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel): "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for grid_spacing. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py index 50bbf36..7a3b066 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py @@ -127,14 +127,12 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -159,14 +157,12 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -191,14 +187,12 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -223,14 +217,12 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -255,19 +247,17 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + focal_depth: float = Field(..., description="""Focal depth offset, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ @@ -289,11 +279,11 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -318,18 +308,16 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. 
This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py index 8a9ce62..08f2b79 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py @@ -123,9 +123,7 @@ class TimeSeriesReferenceVectorData(VectorData): description="""Number of data samples available in this time series, during this epoch""", ) timeseries: TimeSeries = Field(..., description="""The TimeSeries that this index applies to""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -214,10 +212,15 @@ class TimeSeries(NWBDataInterface): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) data: TimeSeriesData = Field( ..., @@ -262,19 +265,21 @@ class TimeSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) offset: Optional[float] = Field( None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", ) resolution: Optional[float] = Field( - None, + -1.0, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) continuity: Optional[str] = Field( @@ -304,9 +309,13 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") - unit: Optional[str] = Field( - None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" + rate: float = Field(..., description="""Sampling rate, in Hz.""") + unit: Literal["seconds"] = Field( + "seconds", + description="""Unit of measurement for time, which is fixed to 'seconds'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"} + }, ) value: float = Field(...) @@ -352,9 +361,7 @@ class Images(NWBDataInterface): ) name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}}) - description: Optional[str] = Field( - None, description="""Description of this collection of images.""" - ) + description: str = Field(..., description="""Description of this collection of images.""") image: List[Image] = Field(..., description="""Images stored in this collection.""") order_of_images: Named[Optional[ImageReferences]] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py index e4310c8..08d5d2b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py @@ -84,10 +84,15 @@ class SpatialSeries(TimeSeries): reference_frame: Optional[str] = Field( None, description="""Description defining what exactly 'straight-ahead' means.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -128,8 +133,9 @@ class SpatialSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py index 65b1d87..a3371dd 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py @@ -134,10 +134,15 @@ class ElectricalSeries(TimeSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -206,10 +211,15 @@ class SpikeEventSeries(ElectricalSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -377,9 +387,9 @@ class ElectrodeGroup(NWBContainer): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") - location: Optional[str] = Field( - None, + description: str = Field(..., description="""Description of this electrode group.""") + location: str = Field( + ..., description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""", ) position: Optional[ElectrodeGroupPosition] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py index 4adcf8b..0ec3f6b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py @@ -159,13 +159,11 @@ class TimeIntervals(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py index e1521bd..be8be33 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py @@ -105,9 +105,7 @@ class ScratchData(NWBData): ) name: str = Field(...) - notes: Optional[str] = Field( - None, description="""Any notes the user has about the dataset being stored""" - ) + notes: str = Field(..., description="""Any notes the user has about the dataset being stored""") class NWBFile(NWBContainer): @@ -123,9 +121,10 @@ class NWBFile(NWBContainer): "root", json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}}, ) - nwb_version: Optional[str] = Field( - None, + nwb_version: Literal["2.6.0"] = Field( + "2.6.0", description="""File version string. Use semantic versioning, e.g. 1.2.1. 
This will be the name of the format with trailing major, minor and patch numbers.""", + json_schema_extra={"linkml_meta": {"equals_string": "2.6.0", "ifabsent": "string(2.6.0)"}}, ) file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., @@ -332,7 +331,7 @@ class GeneralSourceScript(ConfiguredBaseModel): "linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"} }, ) - file_name: Optional[str] = Field(None, description="""Name of script file.""") + file_name: str = Field(..., description="""Name of script file.""") value: str = Field(...) @@ -475,13 +474,11 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -630,8 +627,9 @@ class SubjectAge(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "age", "ifabsent": "string(age)"}}, ) reference: Optional[str] = Field( - None, + "birth", description="""Age is with reference to this event. Can be 'birth' or 'gestational'. If reference is omitted, 'birth' is implied.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(birth)"}}, ) value: str = Field(...) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py index 9795c3a..ea2c7ec 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py @@ -114,8 +114,8 @@ class PatchClampSeries(TimeSeries): ) name: str = Field(...) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -134,10 +134,15 @@ class PatchClampSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -177,8 +182,8 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) value: Optional[NDArray[Shape["* num_times"], float]] = Field( @@ -202,8 +207,8 @@ class CurrentClampSeries(PatchClampSeries): capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) @@ -221,10 +226,15 @@ class CurrentClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -264,9 +274,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data, which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -281,9 +292,10 @@ class IZeroClampSeries(CurrentClampSeries): ) name: str = Field(...)
- stimulus_description: Optional[str] = Field( - None, + stimulus_description: Literal["N/A"] = Field( + "N/A", description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""", + json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}}, ) bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") @@ -307,10 +319,15 @@ class IZeroClampSeries(CurrentClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -350,8 +367,8 @@ class CurrentClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) @@ -369,10 +386,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -412,9 +434,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data, which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...)
@@ -451,8 +476,8 @@ class VoltageClampSeries(PatchClampSeries): whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = ( Field(None, description="""Whole cell series resistance compensation, in ohms.""") ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) @@ -470,10 +495,15 @@ class VoltageClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -513,9 +543,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data, which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -536,9 +569,12 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -559,9 +595,12 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_slow, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -582,9 +621,10 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["hertz"] = Field( + "hertz", description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", + json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}}, ) value: float = Field(...)
@@ -605,9 +645,12 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -628,9 +671,12 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -651,9 +697,12 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -674,9 +723,10 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["ohms"] = Field( + "ohms", description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", + json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}}, ) value: float = Field(...) @@ -692,8 +742,8 @@ class VoltageClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" ) @@ -711,10 +761,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -754,9 +809,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data, which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -833,13 +889,11 @@ class SweepTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -860,14 +914,21 @@ class IntracellularElectrodesTable(DynamicTable): ) name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" + description: Literal["Table for storing intracellular electrode related metadata."] = Field( + "Table for storing intracellular electrode related metadata.", + description="""Description of what is in this dynamic table.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": "Table for storing intracellular electrode related metadata.", + "ifabsent": "string(Table for storing intracellular electrode related metadata.)", + } + }, ) electrode: List[IntracellularElectrode] = Field( ..., description="""Column for storing the reference to the intracellular electrode.""" ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -890,8 +951,15 @@ class IntracellularStimuliTable(DynamicTable): ) name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" + description: Literal["Table for storing intracellular stimulus related metadata."] = Field( + "Table for storing intracellular stimulus related metadata.", + description="""Description of what is in this dynamic table.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": "Table for storing intracellular stimulus related metadata.", + "ifabsent": "string(Table for storing intracellular stimulus related metadata.)", + } + }, ) stimulus: Named[TimeSeriesReferenceVectorData] = Field( ..., @@ -905,8 +973,8 @@ class IntracellularStimuliTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -929,8 +997,15 @@ class IntracellularResponsesTable(DynamicTable): ) name: str = Field(...) 
- description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" + description: Literal["Table for storing intracellular response related metadata."] = Field( + "Table for storing intracellular response related metadata.", + description="""Description of what is in this dynamic table.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": "Table for storing intracellular response related metadata.", + "ifabsent": "string(Table for storing intracellular response related metadata.)", + } + }, ) response: Named[TimeSeriesReferenceVectorData] = Field( ..., @@ -944,8 +1019,8 @@ class IntracellularResponsesTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -976,9 +1051,27 @@ class IntracellularRecordingsTable(AlignedDynamicTable): } }, ) - description: Optional[str] = Field( - None, + description: Literal[ + "A table to group together a stimulus and response from a single electrode and a single" + " simultaneous recording and for storing metadata about the intracellular recording." + ] = Field( + "A table to group together a stimulus and response from a single electrode and a single" + " simultaneous recording and for storing metadata about the intracellular recording.", description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": ( + "A table to group together a stimulus and response from a " + "single electrode and a single simultaneous recording and " + "for storing metadata about the intracellular recording." + ), + "ifabsent": ( + "string(A table to group together a stimulus and response from a " + "single electrode and a single simultaneous recording and for " + "storing metadata about the intracellular recording.)" + ), + } + }, ) electrodes: IntracellularElectrodesTable = Field( ..., description="""Table for storing intracellular electrode related metadata.""" @@ -992,8 +1085,8 @@ class IntracellularRecordingsTable(AlignedDynamicTable): value: Optional[List[DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -1040,13 +1133,11 @@ class SimultaneousRecordingsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1070,12 +1161,12 @@ class SimultaneousRecordingsTableRecordings(DynamicTableRegion): "linkml_meta": {"equals_string": "recordings", "ifabsent": "string(recordings)"} }, ) - table: Optional[IntracellularRecordingsTable] = Field( - None, + table: IntracellularRecordingsTable = Field( + ..., description="""Reference to the IntracellularRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ @@ -1130,13 +1221,11 @@ class SequentialRecordingsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1163,12 +1252,12 @@ class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion): } }, ) - table: Optional[SimultaneousRecordingsTable] = Field( - None, + table: SimultaneousRecordingsTable = Field( + ..., description="""Reference to the SimultaneousRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ @@ -1211,13 +1300,11 @@ class RepetitionsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1244,12 +1331,12 @@ class RepetitionsTableSequentialRecordings(DynamicTableRegion): } }, ) - table: Optional[SequentialRecordingsTable] = Field( - None, + table: SequentialRecordingsTable = Field( + ..., description="""Reference to the SequentialRecordingsTable table that this table region applies to. 
This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ @@ -1294,13 +1381,11 @@ class ExperimentalConditionsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1324,12 +1409,12 @@ class ExperimentalConditionsTableRepetitions(DynamicTableRegion): "linkml_meta": {"equals_string": "repetitions", "ifabsent": "string(repetitions)"} }, ) - table: Optional[RepetitionsTable] = Field( - None, + table: RepetitionsTable = Field( + ..., description="""Reference to the RepetitionsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py index dd490ec..07a02dc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py @@ -175,10 +175,15 @@ class ImageSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -220,8 +225,8 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[int] = Field( - None, + starting_frame: List[int] = Field( + ..., description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. 
Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) value: Optional[NDArray[Shape["* num_files"], str]] = Field( @@ -276,10 +281,15 @@ class ImageMaskSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -356,10 +366,15 @@ class OpticalSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -421,10 +436,15 @@ class IndexSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py index 2235224..a8b3e9a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py @@ -120,10 +120,15 @@ class AbstractFeatureSeries(TimeSeries): description="""Description of the features represented in TimeSeries::data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -164,8 +169,9 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "see 'feature_units'", description="""Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}}, ) value: Optional[ Union[ @@ -190,10 +196,15 @@ class AnnotationSeries(TimeSeries): description="""Annotations made during an experiment.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -237,10 +248,15 @@ class IntervalSeries(TimeSeries): description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -308,10 +324,15 @@ class DecompositionSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -351,9 +372,10 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, @@ -415,13 +437,11 @@ class DecompositionSeriesBands(DynamicTable): description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -555,13 +575,11 @@ class Units(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -589,9 +607,7 @@ class UnitsSpikeTimes(VectorData): None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py index b4db178..66baef6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py @@ -91,10 +91,15 @@ class OptogeneticSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py index 9e6c34a..22d467f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py @@ -167,10 +167,15 @@ class OnePhotonSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -256,10 +261,15 @@ class TwoPhotonSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -313,10 +323,15 @@ class RoiResponseSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -450,13 +465,11 @@ class PlaneSegmentation(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -480,9 +493,7 @@ class PlaneSegmentationImageMask(VectorData): "linkml_meta": {"equals_string": "image_mask", "ifabsent": "string(image_mask)"} }, ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -509,9 +520,7 @@ class PlaneSegmentationPixelMask(VectorData): x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") weight: Optional[float] = Field(None, description="""Weight of the pixel.""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -539,9 +548,7 @@ class PlaneSegmentationVoxelMask(VectorData): y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") weight: Optional[float] = Field(None, description="""Weight of the voxel.""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -617,12 +624,14 @@ class ImagingPlaneManifold(ConfiguredBaseModel): }, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ @@ -645,8 +654,10 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel): "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for origin_coords. 
The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( Field(None) @@ -666,8 +677,10 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel): "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for grid_spacing. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py index 9dd7994..75922c5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py @@ -127,14 +127,12 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -159,14 +157,12 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -191,14 +187,12 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -223,14 +217,12 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -255,19 +247,17 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + focal_depth: float = Field(..., description="""Focal depth offset, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ @@ -289,11 +279,11 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -318,18 +308,16 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. 
This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py index 38c4e21..1e2d4a5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py @@ -123,9 +123,7 @@ class TimeSeriesReferenceVectorData(VectorData): description="""Number of data samples available in this time series, during this epoch""", ) timeseries: TimeSeries = Field(..., description="""The TimeSeries that this index applies to""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -214,10 +212,15 @@ class TimeSeries(NWBDataInterface): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) data: TimeSeriesData = Field( ..., @@ -262,19 +265,21 @@ class TimeSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) offset: Optional[float] = Field( None, description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", ) resolution: Optional[float] = Field( - None, + -1.0, description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(-1.0)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) continuity: Optional[str] = Field( @@ -304,9 +309,13 @@ class TimeSeriesStartingTime(ConfiguredBaseModel): "linkml_meta": {"equals_string": "starting_time", "ifabsent": "string(starting_time)"} }, ) - rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") - unit: Optional[str] = Field( - None, description="""Unit of measurement for time, which is fixed to 'seconds'.""" + rate: float = Field(..., description="""Sampling rate, in Hz.""") + unit: Literal["seconds"] = Field( + "seconds", + description="""Unit of measurement for time, which is fixed to 'seconds'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "seconds", "ifabsent": "string(seconds)"} + }, ) value: float = Field(...) @@ -352,9 +361,7 @@ class Images(NWBDataInterface): ) name: str = Field("Images", json_schema_extra={"linkml_meta": {"ifabsent": "string(Images)"}}) - description: Optional[str] = Field( - None, description="""Description of this collection of images.""" - ) + description: str = Field(..., description="""Description of this collection of images.""") image: List[Image] = Field(..., description="""Images stored in this collection.""") order_of_images: Named[Optional[ImageReferences]] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py index f1ea1a0..e8c8a80 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py @@ -84,10 +84,15 @@ class SpatialSeries(TimeSeries): reference_frame: Optional[str] = Field( None, description="""Description defining what exactly 'straight-ahead' means.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -128,8 +133,9 @@ class SpatialSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py index 2a92c91..f772257 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py @@ -134,10 +134,15 @@ class ElectricalSeries(TimeSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -206,10 +211,15 @@ class SpikeEventSeries(ElectricalSeries): description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_channels"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -377,9 +387,9 @@ class ElectrodeGroup(NWBContainer): ) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") - location: Optional[str] = Field( - None, + description: str = Field(..., description="""Description of this electrode group.""") + location: str = Field( + ..., description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.""", ) position: Optional[ElectrodeGroupPosition] = Field( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py index 44cc59e..0eb0390 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py @@ -159,13 +159,11 @@ class TimeIntervals(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py index 5d2c034..ef1a1e6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py @@ -105,9 +105,7 @@ class ScratchData(NWBData): ) name: str = Field(...) - notes: Optional[str] = Field( - None, description="""Any notes the user has about the dataset being stored""" - ) + notes: str = Field(..., description="""Any notes the user has about the dataset being stored""") class NWBFile(NWBContainer): @@ -123,9 +121,10 @@ class NWBFile(NWBContainer): "root", json_schema_extra={"linkml_meta": {"equals_string": "root", "ifabsent": "string(root)"}}, ) - nwb_version: Optional[str] = Field( - None, + nwb_version: Literal["2.7.0"] = Field( + "2.7.0", description="""File version string. Use semantic versioning, e.g. 1.2.1. 
This will be the name of the format with trailing major, minor and patch numbers.""", + json_schema_extra={"linkml_meta": {"equals_string": "2.7.0", "ifabsent": "string(2.7.0)"}}, ) file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( ..., @@ -340,7 +339,7 @@ class GeneralSourceScript(ConfiguredBaseModel): "linkml_meta": {"equals_string": "source_script", "ifabsent": "string(source_script)"} }, ) - file_name: Optional[str] = Field(None, description="""Name of script file.""") + file_name: str = Field(..., description="""Name of script file.""") value: str = Field(...) @@ -483,13 +482,11 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -638,8 +635,9 @@ class SubjectAge(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "age", "ifabsent": "string(age)"}}, ) reference: Optional[str] = Field( - None, + "birth", description="""Age is with reference to this event. Can be 'birth' or 'gestational'. If reference is omitted, 'birth' is implied.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(birth)"}}, ) value: str = Field(...) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py index 23a0ff2..577ddb8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py @@ -114,8 +114,8 @@ class PatchClampSeries(TimeSeries): ) name: str = Field(...) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -134,10 +134,15 @@ class PatchClampSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -177,8 +182,8 @@ class PatchClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + ..., description="""Base unit of measurement for working with the data. 
Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", ) value: Optional[NDArray[Shape["* num_times"], float]] = Field( @@ -202,8 +207,8 @@ class CurrentClampSeries(PatchClampSeries): capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -221,10 +226,15 @@ class CurrentClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -264,9 +274,10 @@ class CurrentClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -281,9 +292,10 @@ class IZeroClampSeries(CurrentClampSeries): ) name: str = Field(...) - stimulus_description: Optional[str] = Field( - None, + stimulus_description: Literal["N/A"] = Field( + "N/A", description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""", + json_schema_extra={"linkml_meta": {"equals_string": "N/A", "ifabsent": "string(N/A)"}}, ) bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") @@ -307,10 +319,15 @@ class IZeroClampSeries(CurrentClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -350,8 +367,8 @@ class CurrentClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: CurrentClampStimulusSeriesData = Field(..., description="""Stimulus current applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -369,10 +386,15 @@ class CurrentClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -412,9 +434,12 @@ class CurrentClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -451,8 +476,8 @@ class VoltageClampSeries(PatchClampSeries): whole_cell_series_resistance_comp: Optional[VoltageClampSeriesWholeCellSeriesResistanceComp] = ( Field(None, description="""Whole cell series resistance compensation, in ohms.""") ) - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -470,10 +495,15 @@ class VoltageClampSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -513,9 +543,12 @@ class VoltageClampSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["amperes"] = Field( + "amperes", description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "amperes", "ifabsent": "string(amperes)"} + }, ) value: Any = Field(...) @@ -536,9 +569,12 @@ class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -559,9 +595,12 @@ class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) @@ -582,9 +621,10 @@ class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["hertz"] = Field( + "hertz", description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", + json_schema_extra={"linkml_meta": {"equals_string": "hertz", "ifabsent": "string(hertz)"}}, ) value: float = Field(...) @@ -605,9 +645,12 @@ class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -628,9 +671,12 @@ class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["percent"] = Field( + "percent", description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "percent", "ifabsent": "string(percent)"} + }, ) value: float = Field(...) @@ -651,9 +697,12 @@ class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["farads"] = Field( + "farads", description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", + json_schema_extra={ + "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"} + }, ) value: float = Field(...) 
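The hunks above repeatedly swap `Optional[str]` unit attributes for `Literal` types that carry both a default value and `equals_string`/`ifabsent` metadata. A minimal sketch of how that pattern behaves under pydantic v2 validation, with `UnitSketch` as an illustrative stand-in rather than one of the generated models:

```python
# Sketch of the fixed-value attribute pattern introduced above.
# `UnitSketch` is a hypothetical stand-in, not a generated nwb_linkml class.
from typing import Literal

from pydantic import BaseModel, Field, ValidationError


class UnitSketch(BaseModel):
    unit: Literal["farads"] = Field(
        "farads",  # ifabsent: the fixed value is filled in when omitted
        json_schema_extra={
            "linkml_meta": {"equals_string": "farads", "ifabsent": "string(farads)"}
        },
    )
    value: float


print(UnitSketch(value=1e-12).unit)  # -> "farads"

try:
    UnitSketch(unit="ohms", value=1e-12)
except ValidationError as e:
    # the Literal type enforces equals_string at validation time
    print(e.errors()[0]["type"])  # -> "literal_error"
```

Callers that previously got `None` back for an omitted unit now receive the schema's fixed value instead, which matches the `ifabsent` semantics these hunks encode.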
@@ -674,9 +723,10 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): } }, ) - unit: Optional[str] = Field( - None, + unit: Literal["ohms"] = Field( + "ohms", description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", + json_schema_extra={"linkml_meta": {"equals_string": "ohms", "ifabsent": "string(ohms)"}}, ) value: float = Field(...) @@ -692,8 +742,8 @@ class VoltageClampStimulusSeries(PatchClampSeries): name: str = Field(...) data: VoltageClampStimulusSeriesData = Field(..., description="""Stimulus voltage applied.""") - stimulus_description: Optional[str] = Field( - None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + stimulus_description: str = Field( + ..., description="""Protocol/stimulus name for this patch-clamp dataset.""" ) sweep_number: Optional[int] = Field( None, description="""Sweep number, allows to group different PatchClampSeries together.""" @@ -711,10 +761,15 @@ class VoltageClampStimulusSeries(PatchClampSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -754,9 +809,10 @@ class VoltageClampStimulusSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: Literal["volts"] = Field( + "volts", description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + json_schema_extra={"linkml_meta": {"equals_string": "volts", "ifabsent": "string(volts)"}}, ) value: Any = Field(...) @@ -833,13 +889,11 @@ class SweepTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -860,14 +914,21 @@ class IntracellularElectrodesTable(DynamicTable): ) name: str = Field(...) 
- description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" + description: Literal["Table for storing intracellular electrode related metadata."] = Field( + "Table for storing intracellular electrode related metadata.", + description="""Description of what is in this dynamic table.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": "Table for storing intracellular electrode related metadata.", + "ifabsent": "string(Table for storing intracellular electrode related metadata.)", + } + }, ) electrode: List[IntracellularElectrode] = Field( ..., description="""Column for storing the reference to the intracellular electrode.""" ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -890,8 +951,15 @@ class IntracellularStimuliTable(DynamicTable): ) name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" + description: Literal["Table for storing intracellular stimulus related metadata."] = Field( + "Table for storing intracellular stimulus related metadata.", + description="""Description of what is in this dynamic table.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": "Table for storing intracellular stimulus related metadata.", + "ifabsent": "string(Table for storing intracellular stimulus related metadata.)", + } + }, ) stimulus: Named[TimeSeriesReferenceVectorData] = Field( ..., @@ -917,8 +985,8 @@ class IntracellularStimuliTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -941,8 +1009,15 @@ class IntracellularResponsesTable(DynamicTable): ) name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" + description: Literal["Table for storing intracellular response related metadata."] = Field( + "Table for storing intracellular response related metadata.", + description="""Description of what is in this dynamic table.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": "Table for storing intracellular response related metadata.", + "ifabsent": "string(Table for storing intracellular response related metadata.)", + } + }, ) response: Named[TimeSeriesReferenceVectorData] = Field( ..., @@ -956,8 +1031,8 @@ class IntracellularResponsesTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -988,9 +1063,27 @@ class IntracellularRecordingsTable(AlignedDynamicTable): } }, ) - description: Optional[str] = Field( - None, + description: Literal[ + "A table to group together a stimulus and response from a single electrode and a single" + " simultaneous recording and for storing metadata about the intracellular recording." 
+ ] = Field( + "A table to group together a stimulus and response from a single electrode and a single" + " simultaneous recording and for storing metadata about the intracellular recording.", description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""", + json_schema_extra={ + "linkml_meta": { + "equals_string": ( + "A table to group together a stimulus and response from a " + "single electrode and a single simultaneous recording and " + "for storing metadata about the intracellular recording." + ), + "ifabsent": ( + "string(A table to group together a stimulus and response from a " + "single electrode and a single simultaneous recording and for " + "storing metadata about the intracellular recording.)" + ), + } + }, ) electrodes: IntracellularElectrodesTable = Field( ..., description="""Table for storing intracellular electrode related metadata.""" @@ -1004,8 +1097,8 @@ class IntracellularRecordingsTable(AlignedDynamicTable): value: Optional[List[DynamicTable]] = Field( None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) id: NDArray[Shape["* num_rows"], int] = Field( @@ -1052,13 +1145,11 @@ class SimultaneousRecordingsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1082,12 +1173,12 @@ class SimultaneousRecordingsTableRecordings(DynamicTableRegion): "linkml_meta": {"equals_string": "recordings", "ifabsent": "string(recordings)"} }, ) - table: Optional[IntracellularRecordingsTable] = Field( - None, + table: IntracellularRecordingsTable = Field( + ..., description="""Reference to the IntracellularRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ @@ -1142,13 +1233,11 @@ class SequentialRecordingsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1175,12 +1264,12 @@ class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion): } }, ) - table: Optional[SimultaneousRecordingsTable] = Field( - None, + table: SimultaneousRecordingsTable = Field( + ..., description="""Reference to the SimultaneousRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ @@ -1223,13 +1312,11 @@ class RepetitionsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1256,12 +1343,12 @@ class RepetitionsTableSequentialRecordings(DynamicTableRegion): } }, ) - table: Optional[SequentialRecordingsTable] = Field( - None, + table: SequentialRecordingsTable = Field( + ..., description="""Reference to the SequentialRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ @@ -1306,13 +1393,11 @@ class ExperimentalConditionsTable(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -1336,12 +1421,12 @@ class ExperimentalConditionsTableRepetitions(DynamicTableRegion): "linkml_meta": {"equals_string": "repetitions", "ifabsent": "string(repetitions)"} }, ) - table: Optional[RepetitionsTable] = Field( - None, + table: RepetitionsTable = Field( + ..., description="""Reference to the RepetitionsTable table that this table region applies to. 
This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py index e43675b..69a8617 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py @@ -175,10 +175,15 @@ class ImageSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -220,8 +225,8 @@ class ImageSeriesExternalFile(ConfiguredBaseModel): "linkml_meta": {"equals_string": "external_file", "ifabsent": "string(external_file)"} }, ) - starting_frame: Optional[int] = Field( - None, + starting_frame: List[int] = Field( + ..., description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", ) value: Optional[NDArray[Shape["* num_files"], str]] = Field( @@ -276,10 +281,15 @@ class ImageMaskSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -356,10 +366,15 @@ class OpticalSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -421,10 +436,15 @@ class IndexSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py index c4b1c2e..57697a3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py @@ -120,10 +120,15 @@ class AbstractFeatureSeries(TimeSeries): description="""Description of the features represented in TimeSeries::data.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_features"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -164,8 +169,9 @@ class AbstractFeatureSeriesData(ConfiguredBaseModel): json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) unit: Optional[str] = Field( - None, + "see 'feature_units'", description="""Since there can be different units for different features, store the units in 'feature_units'. 
The default value for this attribute is \"see 'feature_units'\".""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(see 'feature_units')"}}, ) value: Optional[ Union[ @@ -190,10 +196,15 @@ class AnnotationSeries(TimeSeries): description="""Annotations made during an experiment.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -237,10 +248,15 @@ class IntervalSeries(TimeSeries): description="""Use values >0 if interval started, <0 if interval ended.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_times"}]}}}, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -308,10 +324,15 @@ class DecompositionSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -351,9 +372,10 @@ class DecompositionSeriesData(ConfiguredBaseModel): "data", json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, ) - unit: Optional[str] = Field( - None, + unit: str = Field( + "no unit", description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no unit)"}}, ) value: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field( None, @@ -415,13 +437,11 @@ class DecompositionSeriesBands(DynamicTable): description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -555,13 +575,11 @@ class Units(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -589,9 +607,7 @@ class UnitsSpikeTimes(VectorData): None, description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py index e575a80..d77dcba 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py @@ -92,10 +92,15 @@ class OptogeneticSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py index daeed24..d734f11 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py @@ -167,10 +167,15 @@ class OnePhotonSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -256,10 +261,15 @@ class TwoPhotonSeries(ImageSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -313,10 +323,15 @@ class RoiResponseSeries(TimeSeries): } }, ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + "no description", + description="""Description of the time series.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no description)"}}, + ) comments: Optional[str] = Field( - None, + "no comments", description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(no comments)"}}, ) starting_time: Optional[TimeSeriesStartingTime] = Field( None, @@ -450,13 +465,11 @@ class PlaneSegmentation(DynamicTable): } }, ) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -480,9 +493,7 @@ class PlaneSegmentationImageMask(VectorData): "linkml_meta": {"equals_string": "image_mask", "ifabsent": "string(image_mask)"} }, ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -509,9 +520,7 @@ class PlaneSegmentationPixelMask(VectorData): x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") weight: Optional[float] = Field(None, description="""Weight of the pixel.""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -539,9 +548,7 @@ class PlaneSegmentationVoxelMask(VectorData): y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") weight: Optional[float] = Field(None, description="""Weight of the voxel.""") - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -617,12 +624,14 @@ class ImagingPlaneManifold(ConfiguredBaseModel): }, ) conversion: Optional[float] = Field( - None, + 1.0, description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.""", + json_schema_extra={"linkml_meta": {"ifabsent": "float(1.0)"}}, ) unit: Optional[str] = Field( - None, + "meters", description="""Base unit of measurement for working with the data. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[ Union[ @@ -645,8 +654,10 @@ class ImagingPlaneOriginCoords(ConfiguredBaseModel): "linkml_meta": {"equals_string": "origin_coords", "ifabsent": "string(origin_coords)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for origin_coords. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for origin_coords. 
The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( Field(None) @@ -666,8 +677,10 @@ class ImagingPlaneGridSpacing(ConfiguredBaseModel): "linkml_meta": {"equals_string": "grid_spacing", "ifabsent": "string(grid_spacing)"} }, ) - unit: Optional[str] = Field( - None, description="""Measurement units for grid_spacing. The default value is 'meters'.""" + unit: str = Field( + "meters", + description="""Measurement units for grid_spacing. The default value is 'meters'.""", + json_schema_extra={"linkml_meta": {"ifabsent": "string(meters)"}}, ) value: Optional[Union[NDArray[Shape["2 x_y"], float], NDArray[Shape["3 x_y_z"], float]]] = ( Field(None) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py index 40c1f40..0942bac 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py @@ -127,14 +127,12 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -159,14 +157,12 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -191,14 +187,12 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -223,14 +217,12 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): } }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - unit: Optional[str] = Field( - None, description="""Unit that axis data is stored in (e.g., degrees).""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + unit: str = Field(..., description="""Unit that axis data is stored in (e.g., degrees).""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -255,19 +247,17 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + focal_depth: float = Field(..., description="""Focal depth offset, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ @@ -289,11 +279,11 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): "linkml_meta": {"equals_string": "sign_map", "ifabsent": "string(sign_map)"} }, ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field( None, json_schema_extra={ @@ -318,18 +308,16 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): } }, ) - bits_per_pixel: Optional[int] = Field( - None, + bits_per_pixel: int = Field( + ..., description="""Number of bits used to represent each value. 
This is necessary to determine maximum (white) pixel value""", ) - dimension: Optional[int] = Field( - None, + dimension: List[int] = Field( + ..., description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - format: Optional[str] = Field( - None, description="""Format of image. Right now only 'raw' is supported.""" - ) + field_of_view: List[float] = Field(..., description="""Size of viewing area, in meters.""") + format: str = Field(..., description="""Format of image. Right now only 'raw' is supported.""") value: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field( None, json_schema_extra={ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py index 2ba56a5..af77c6c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py @@ -7,6 +7,7 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np +from numpydantic import NDArray, Shape metamodel_version = "None" version = "1.1.0" @@ -44,6 +45,7 @@ class LinkMLMeta(RootModel): return key in self.root +NUMPYDANTIC_VERSION = "1.2.1" linkml_meta = LinkMLMeta( { "annotations": { @@ -68,7 +70,13 @@ class CSRMatrix(ConfiguredBaseModel): ) name: str = Field(...) - shape: Optional[int] = Field(None, description="""the shape of this sparse matrix""") + shape: NDArray[Shape["2 null"], int] = Field( + ..., + description="""the shape of this sparse matrix""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "null", "exact_cardinality": 2}]}} + }, + ) indices: CSRMatrixIndices = Field(..., description="""column indices""") indptr: CSRMatrixIndptr = Field(..., description="""index pointer""") data: CSRMatrixData = Field(..., description="""values in the matrix""") diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index 93d1574..0647a30 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -314,9 +314,7 @@ class Index(Data): ) name: str = Field(...) - target: Optional[Data] = Field( - None, description="""Target dataset that this index applies to.""" - ) + target: Data = Field(..., description="""Target dataset that this index applies to.""") class VectorData(VectorDataMixin): @@ -329,9 +327,7 @@ class VectorData(VectorDataMixin): ) name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") class VectorIndex(VectorIndexMixin): @@ -344,8 +340,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) 
- target: Optional[VectorData] = Field( - None, description="""Reference to the target dataset that this index applies to.""" + target: VectorData = Field( + ..., description="""Reference to the target dataset that this index applies to.""" ) @@ -373,11 +369,11 @@ class DynamicTableRegion(VectorData): ) name: str = Field(...) - table: Optional[DynamicTable] = Field( - None, description="""Reference to the DynamicTable object that this region applies to.""" + table: DynamicTable = Field( + ..., description="""Reference to the DynamicTable object that this region applies to.""" ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) @@ -403,13 +399,11 @@ class DynamicTable(DynamicTableMixin): ) name: str = Field(...) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py index 32401a4..8c05020 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py @@ -7,6 +7,7 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np +from numpydantic import NDArray, Shape metamodel_version = "None" version = "1.1.2" @@ -44,6 +45,7 @@ class LinkMLMeta(RootModel): return key in self.root +NUMPYDANTIC_VERSION = "1.2.1" linkml_meta = LinkMLMeta( { "annotations": { @@ -68,7 +70,13 @@ class CSRMatrix(ConfiguredBaseModel): ) name: str = Field(...) - shape: Optional[int] = Field(None, description="""the shape of this sparse matrix""") + shape: NDArray[Shape["2 null"], int] = Field( + ..., + description="""the shape of this sparse matrix""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "null", "exact_cardinality": 2}]}} + }, + ) indices: CSRMatrixIndices = Field(..., description="""column indices""") indptr: CSRMatrixIndptr = Field(..., description="""index pointer""") data: CSRMatrixData = Field(..., description="""values in the matrix""") diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 25748ee..21fc9c0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -314,9 +314,7 @@ class Index(Data): ) name: str = Field(...) 
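The pattern running through these table models is `Optional[...] = Field(None, ...)` becoming `Field(...)`, pydantic's marker for a required field. A minimal sketch of the behavioral difference; `VectorDataDemo` is a hypothetical reduction of the generated class:

```python
# Minimal sketch of the Optional -> required change; VectorDataDemo is a
# hypothetical reduction of the generated class, not code from this patch.
from pydantic import BaseModel, Field, ValidationError

class VectorDataDemo(BaseModel):
    name: str = Field(...)
    description: str = Field(..., description="Description of what these vectors represent.")

try:
    VectorDataDemo(name="spike_times")  # description omitted
except ValidationError as e:
    print(e.errors()[0]["loc"], e.errors()[0]["type"])  # ('description',) missing
```

Previously the same construction would have succeeded silently with `description=None`.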
- target: Optional[Data] = Field( - None, description="""Target dataset that this index applies to.""" - ) + target: Data = Field(..., description="""Target dataset that this index applies to.""") class VectorData(VectorDataMixin): @@ -329,9 +327,7 @@ class VectorData(VectorDataMixin): ) name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") class VectorIndex(VectorIndexMixin): @@ -344,8 +340,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) - target: Optional[VectorData] = Field( - None, description="""Reference to the target dataset that this index applies to.""" + target: VectorData = Field( + ..., description="""Reference to the target dataset that this index applies to.""" ) @@ -373,11 +369,11 @@ class DynamicTableRegion(VectorData): ) name: str = Field(...) - table: Optional[DynamicTable] = Field( - None, description="""Reference to the DynamicTable object that this region applies to.""" + table: DynamicTable = Field( + ..., description="""Reference to the DynamicTable object that this region applies to.""" ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) @@ -403,13 +399,11 @@ class DynamicTable(DynamicTableMixin): ) name: str = Field(...) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py index 872d645..c473269 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py @@ -7,6 +7,7 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np +from numpydantic import NDArray, Shape metamodel_version = "None" version = "1.1.3" @@ -44,6 +45,7 @@ class LinkMLMeta(RootModel): return key in self.root +NUMPYDANTIC_VERSION = "1.2.1" linkml_meta = LinkMLMeta( { "annotations": { @@ -68,7 +70,13 @@ class CSRMatrix(ConfiguredBaseModel): ) name: str = Field(...) 
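`colnames` changes type as well as requiredness: a single `Optional[str]` becomes a required `List[str]`, matching hdmf, where `colnames` is an explicit column ordering. A sketch under that reading; `DynamicTableDemo` is hypothetical:

```python
# Sketch: colnames as a required, ordered list of column names. Hypothetical
# pared-down model; the generated DynamicTable carries many more fields.
from typing import List
from pydantic import BaseModel, Field

class DynamicTableDemo(BaseModel):
    name: str = Field(...)
    colnames: List[str] = Field(..., description="The names of the columns in this table.")
    description: str = Field(...)

DynamicTableDemo(
    name="trials",
    colnames=["start_time", "stop_time"],  # list order is the column order
    description="Trial intervals",
)
```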
- shape: Optional[int] = Field(None, description="""the shape of this sparse matrix""") + shape: NDArray[Shape["2 null"], int] = Field( + ..., + description="""the shape of this sparse matrix""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "null", "exact_cardinality": 2}]}} + }, + ) indices: CSRMatrixIndices = Field(..., description="""column indices""") indptr: CSRMatrixIndptr = Field(..., description="""index pointer""") data: CSRMatrixData = Field(..., description="""values in the matrix""") diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index 3d4762c..0a4ed6e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -314,9 +314,7 @@ class Index(Data): ) name: str = Field(...) - target: Optional[Data] = Field( - None, description="""Target dataset that this index applies to.""" - ) + target: Data = Field(..., description="""Target dataset that this index applies to.""") class VectorData(VectorDataMixin): @@ -329,9 +327,7 @@ class VectorData(VectorDataMixin): ) name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -352,8 +348,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) - target: Optional[VectorData] = Field( - None, description="""Reference to the target dataset that this index applies to.""" + target: VectorData = Field( + ..., description="""Reference to the target dataset that this index applies to.""" ) value: Optional[NDArray[Shape["* num_rows"], Any]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}} @@ -384,11 +380,11 @@ class DynamicTableRegion(VectorData): ) name: str = Field(...) - table: Optional[DynamicTable] = Field( - None, description="""Reference to the DynamicTable object that this region applies to.""" + table: DynamicTable = Field( + ..., description="""Reference to the DynamicTable object that this region applies to.""" ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ @@ -422,13 +418,11 @@ class DynamicTable(DynamicTableMixin): ) name: str = Field(...) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py index 49f08da..e261b0a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py @@ -71,8 +71,8 @@ class CSRMatrix(Container): ) name: str = Field(...) - shape: Optional[int] = Field( - None, description="""The shape (number of rows, number of columns) of this sparse matrix.""" + shape: List[int] = Field( + ..., description="""The shape (number of rows, number of columns) of this sparse matrix.""" ) indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field( ..., diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 61e5ba2..294f168 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -303,9 +303,7 @@ class VectorData(VectorDataMixin): ) name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -326,12 +324,10 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) - target: Optional[VectorData] = Field( - None, description="""Reference to the target dataset that this index applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" + target: VectorData = Field( + ..., description="""Reference to the target dataset that this index applies to.""" ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -366,11 +362,11 @@ class DynamicTableRegion(VectorData): ) name: str = Field(...) - table: Optional[DynamicTable] = Field( - None, description="""Reference to the DynamicTable object that this region applies to.""" + table: DynamicTable = Field( + ..., description="""Reference to the DynamicTable object that this region applies to.""" ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ @@ -392,13 +388,11 @@ class DynamicTable(DynamicTableMixin): ) name: str = Field(...) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -422,13 +416,11 @@ class AlignedDynamicTable(DynamicTable): None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) name: str = Field(...) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py index 56fad93..248869e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py @@ -71,8 +71,8 @@ class CSRMatrix(Container): ) name: str = Field(...) - shape: Optional[int] = Field( - None, description="""The shape (number of rows, number of columns) of this sparse matrix.""" + shape: List[int] = Field( + ..., description="""The shape (number of rows, number of columns) of this sparse matrix.""" ) indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field( ..., diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index e1e413c..6fa0e8c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -303,9 +303,7 @@ class VectorData(VectorDataMixin): ) name: str = Field(...) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -326,12 +324,10 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) - target: Optional[VectorData] = Field( - None, description="""Reference to the target dataset that this index applies to.""" - ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" + target: VectorData = Field( + ..., description="""Reference to the target dataset that this index applies to.""" ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], @@ -366,11 +362,11 @@ class DynamicTableRegion(VectorData): ) name: str = Field(...) 
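Note that the same `shape` slot regenerates differently by version: the hdmf-common 1.1.x models above get the numpydantic `NDArray` form, while 1.5.0 and 1.8.0 get a plain required `List[int]`. A sketch of the newer form; `CSRMatrixDemo` is a hypothetical stand-in:

```python
# Sketch: in hdmf-common >= 1.5.0 the shape slot is a plain required List[int],
# so an ordinary two-element Python list validates directly. Hypothetical model.
from typing import List
from pydantic import BaseModel, Field

class CSRMatrixDemo(BaseModel):
    name: str = Field(...)
    shape: List[int] = Field(
        ..., description="The shape (number of rows, number of columns) of this sparse matrix."
    )

CSRMatrixDemo(name="sparse", shape=[3, 4])
```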
- table: Optional[DynamicTable] = Field( - None, description="""Reference to the DynamicTable object that this region applies to.""" + table: DynamicTable = Field( + ..., description="""Reference to the DynamicTable object that this region applies to.""" ) - description: Optional[str] = Field( - None, description="""Description of what this table region points to.""" + description: str = Field( + ..., description="""Description of what this table region points to.""" ) value: Optional[ Union[ @@ -392,13 +388,11 @@ class DynamicTable(DynamicTableMixin): ) name: str = Field(...) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", @@ -422,13 +416,11 @@ class AlignedDynamicTable(DynamicTable): None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} ) name: str = Field(...) - colnames: Optional[str] = Field( - None, + colnames: List[str] = Field( + ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - description: Optional[str] = Field( - None, description="""Description of what is in this dynamic table.""" - ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") id: NDArray[Shape["* num_rows"], int] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py index 135f2a3..0303e13 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py @@ -71,13 +71,11 @@ class EnumData(VectorData): ) name: str = Field(...) - elements: Optional[VectorData] = Field( - None, + elements: VectorData = Field( + ..., description="""Reference to the VectorData object that contains the enumerable elements""", ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py index a7e6936..57e1a37 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py @@ -71,13 +71,11 @@ class EnumData(VectorData): ) name: str = Field(...) 
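The object-valued references follow the same pattern: `DynamicTableRegion.table` and `EnumData.elements` are now required, so a referring object cannot be constructed without its target. A sketch with hypothetical pared-down models:

```python
# Sketch of the now-required object references; both classes are hypothetical
# reductions, not the generated models.
from pydantic import BaseModel, Field

class VectorDataDemo(BaseModel):
    name: str = Field(...)
    description: str = Field(...)

class EnumDataDemo(BaseModel):
    name: str = Field(...)
    elements: VectorDataDemo = Field(
        ..., description="Reference to the VectorData object that contains the enumerable elements"
    )
    description: str = Field(...)

labels = VectorDataDemo(name="cell_type_labels", description="Possible cell types")
EnumDataDemo(name="cell_type", elements=labels, description="Cell type per row")
# EnumDataDemo(name="cell_type", description="...") would now fail validation.
```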
- elements: Optional[VectorData] = Field( - None, + elements: VectorData = Field( + ..., description="""Reference to the VectorData object that contains the enumerable elements""", ) - description: Optional[str] = Field( - None, description="""Description of what these vectors represent.""" - ) + description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ NDArray[Shape["* dim0"], Any], diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.base.yaml index b2eebf1..f7d7f50 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.base.yaml @@ -38,10 +38,12 @@ classes: name: resolution description: Pixel resolution of the image, in pixels per centimeter. range: float32 + required: false description: name: description description: Description of the image. range: text + required: false value: name: value range: numeric @@ -97,13 +99,17 @@ classes: description: name: description description: Description of the time series. + ifabsent: string(no description) range: text + required: false comments: name: comments description: Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + ifabsent: string(no comments) range: text + required: false data: name: data description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first @@ -188,20 +194,25 @@ classes: to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) range: float32 + required: false resolution: name: resolution description: Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + ifabsent: float(-1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text + required: true value: name: value range: AnyType @@ -240,10 +251,14 @@ classes: name: rate description: Sampling rate, in Hz. range: float32 + required: true unit: name: unit description: Unit of measurement for time, which is fixed to 'seconds'. + ifabsent: string(seconds) range: text + required: true + equals_string: seconds value: name: value range: float64 @@ -291,6 +306,7 @@ classes: name: description description: Description of this collection of images. range: text + required: true image: name: image description: Images stored in this collection. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.behavior.yaml index 4e5ffa0..cb41d79 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.behavior.yaml @@ -61,7 +61,9 @@ classes: description: Base unit of measurement for working with the data. The default value is 'meters'. 
Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(meters) range: text + required: false value: name: value range: numeric diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.device.yaml index ec5c6bc..1b7492f 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.device.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.device.yaml @@ -28,8 +28,10 @@ classes: description: Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text. range: text + required: false manufacturer: name: manufacturer description: The name of the manufacturer of the device. range: text + required: false tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml index 5af96b3..7d61f52 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml @@ -295,12 +295,14 @@ classes: name: description description: Description of this electrode group. range: text + required: true location: name: location description: Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible. range: text + required: true position: name: position description: stereotaxic or common framework coordinates diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml index 3dd86fc..6550555 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.file.yaml @@ -37,7 +37,10 @@ classes: name: nwb_version description: File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers. + ifabsent: string(2.1.0) range: text + required: true + equals_string: 2.1.0 file_create_date: name: file_create_date description: 'A record of the date the file was created and of subsequent @@ -443,6 +446,7 @@ classes: name: file_name description: Name of script file. range: text + required: true value: name: value range: text diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.icephys.yaml index d12f7e1..dc70464 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.icephys.yaml @@ -29,10 +29,12 @@ classes: name: stimulus_description description: Protocol/stimulus name for this patch-clamp dataset. range: text + required: true sweep_number: name: sweep_number description: Sweep number, allows to group different PatchClampSeries together. range: uint32 + required: false data: name: data description: Recorded voltage or current. @@ -74,6 +76,7 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. 
range: text + required: true value: name: value array: @@ -131,7 +134,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType @@ -198,7 +204,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -278,7 +287,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -296,7 +308,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -314,7 +329,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -333,7 +351,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'. + ifabsent: string(hertz) range: text + required: true + equals_string: hertz value: name: value range: float32 @@ -352,7 +373,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_correction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -371,7 +395,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -390,7 +417,10 @@ classes: name: unit description: Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -409,7 +439,10 @@ classes: name: unit description: Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'. + ifabsent: string(ohms) range: text + required: true + equals_string: ohms value: name: value range: float32 @@ -445,7 +478,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. 
+ ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.image.yaml index 271f77d..6920484 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.image.yaml @@ -132,6 +132,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 + required: true + multivalued: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.misc.yaml index 92edf08..4337a4e 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.misc.yaml @@ -72,7 +72,9 @@ classes: description: Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". + ifabsent: string(see 'feature_units') range: text + required: false value: name: value range: numeric @@ -186,7 +188,9 @@ classes: description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(no unit) range: text + required: true value: name: value array: @@ -384,3 +388,4 @@ classes: if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples. range: float64 + required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ophys.yaml index d44aaa2..1183186 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ophys.yaml @@ -29,12 +29,14 @@ classes: name: pmt_gain description: Photomultiplier gain. range: float32 + required: false scan_line_rate: name: scan_line_rate description: Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. range: float32 + required: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. @@ -277,12 +279,16 @@ classes: x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000. + ifabsent: float(1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default value is 'meters'. + ifabsent: string(meters) range: text + required: false value: name: value range: float32 @@ -315,7 +321,9 @@ classes: unit: name: unit description: Measurement units for origin_coords. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value array: @@ -340,7 +348,9 @@ classes: unit: name: unit description: Measurement units for grid_spacing. The default value is 'meters'. 
+ ifabsent: string(meters) range: text + required: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.retinotopy.yaml index f3a06cd..3150687 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.retinotopy.yaml @@ -28,10 +28,14 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true value: name: value array: @@ -54,6 +58,7 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -77,19 +82,25 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value. range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true tree_root: true ImagingRetinotopy: name: ImagingRetinotopy @@ -223,3 +234,4 @@ classes: name: focal_depth description: Focal depth offset, in meters. range: float32 + required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.base.yaml index 234bbc7..c81fea9 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.base.yaml @@ -38,10 +38,12 @@ classes: name: resolution description: Pixel resolution of the image, in pixels per centimeter. range: float32 + required: false description: name: description description: Description of the image. range: text + required: false value: name: value range: numeric @@ -97,13 +99,17 @@ classes: description: name: description description: Description of the time series. + ifabsent: string(no description) range: text + required: false comments: name: comments description: Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + ifabsent: string(no comments) range: text + required: false data: name: data description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first @@ -188,20 +194,25 @@ classes: to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) range: float32 + required: false resolution: name: resolution description: Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. 
+ ifabsent: float(-1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text + required: true value: name: value range: AnyType @@ -240,10 +251,14 @@ classes: name: rate description: Sampling rate, in Hz. range: float32 + required: true unit: name: unit description: Unit of measurement for time, which is fixed to 'seconds'. + ifabsent: string(seconds) range: text + required: true + equals_string: seconds value: name: value range: float64 @@ -291,6 +306,7 @@ classes: name: description description: Description of this collection of images. range: text + required: true image: name: image description: Images stored in this collection. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.behavior.yaml index 03ba313..c555f1a 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.behavior.yaml @@ -61,7 +61,9 @@ classes: description: Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(meters) range: text + required: false value: name: value range: numeric diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.device.yaml index fdb85e6..e0969c8 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.device.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.device.yaml @@ -28,8 +28,10 @@ classes: description: Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text. range: text + required: false manufacturer: name: manufacturer description: The name of the manufacturer of the device. range: text + required: false tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml index 2226676..d5aaa6e 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml @@ -295,12 +295,14 @@ classes: name: description description: Description of this electrode group. range: text + required: true location: name: location description: Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible. range: text + required: true position: name: position description: stereotaxic or common framework coordinates diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml index 504a763..2b01096 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.file.yaml @@ -37,7 +37,10 @@ classes: name: nwb_version description: File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers. 
+ ifabsent: string(2.2.1) range: text + required: true + equals_string: 2.2.1 file_create_date: name: file_create_date description: 'A record of the date the file was created and of subsequent @@ -443,6 +446,7 @@ classes: name: file_name description: Name of script file. range: text + required: true value: name: value range: text diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.icephys.yaml index 42fa48f..11c4893 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.icephys.yaml @@ -29,10 +29,12 @@ classes: name: stimulus_description description: Protocol/stimulus name for this patch-clamp dataset. range: text + required: true sweep_number: name: sweep_number description: Sweep number, allows to group different PatchClampSeries together. range: uint32 + required: false data: name: data description: Recorded voltage or current. @@ -74,6 +76,7 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text + required: true value: name: value array: @@ -131,7 +134,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType @@ -198,7 +204,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -278,7 +287,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -296,7 +308,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -314,7 +329,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -333,7 +351,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'. + ifabsent: string(hertz) range: text + required: true + equals_string: hertz value: name: value range: float32 @@ -352,7 +373,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_correction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -371,7 +395,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'. 
+ ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -390,7 +417,10 @@ classes: name: unit description: Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -409,7 +439,10 @@ classes: name: unit description: Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'. + ifabsent: string(ohms) range: text + required: true + equals_string: ohms value: name: value range: float32 @@ -445,7 +478,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.image.yaml index c74d614..7f406bc 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.image.yaml @@ -132,6 +132,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 + required: true + multivalued: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.misc.yaml index 82a8100..59ff81b 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.misc.yaml @@ -72,7 +72,9 @@ classes: description: Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". + ifabsent: string(see 'feature_units') range: text + required: false value: name: value range: numeric @@ -186,7 +188,9 @@ classes: description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(no unit) range: text + required: true value: name: value array: @@ -384,3 +388,4 @@ classes: if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples. range: float64 + required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ophys.yaml index d5e6b39..45df3d6 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ophys.yaml @@ -29,12 +29,14 @@ classes: name: pmt_gain description: Photomultiplier gain. range: float32 + required: false scan_line_rate: name: scan_line_rate description: Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. range: float32 + required: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. 
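On the LinkML side, the added `ifabsent`, `required`, and `equals_string` annotations carry the defaults and fixed values that were previously dropped. A sketch of how they are expected to surface in the generated pydantic models; the mapping shown (`ifabsent` as a default, `equals_string` as a `Literal`) is an assumption about the generator, and both demo classes are hypothetical:

```python
# Sketch only: assumes the generator maps ifabsent -> default value and
# equals_string -> Literal; demo classes are hypothetical, not generated code.
from typing import Literal, Optional
from pydantic import BaseModel, Field

class UnitDemo(BaseModel):
    # required: true + ifabsent: string(farads) + equals_string: farads
    unit: Literal["farads"] = Field("farads")

class PmtGainDemo(BaseModel):
    # required: false with no fixed value stays Optional with a None default
    pmt_gain: Optional[float] = Field(None, description="Photomultiplier gain.")

print(UnitDemo().unit)   # "farads"
# UnitDemo(unit="ohms")  # would fail: not the permitted literal value
```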
@@ -277,12 +279,16 @@ classes: x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000. + ifabsent: float(1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default value is 'meters'. + ifabsent: string(meters) range: text + required: false value: name: value range: float32 @@ -315,7 +321,9 @@ classes: unit: name: unit description: Measurement units for origin_coords. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value array: @@ -340,7 +348,9 @@ classes: unit: name: unit description: Measurement units for grid_spacing. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.retinotopy.yaml index 5a552ed..8543f50 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.retinotopy.yaml @@ -28,10 +28,14 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true value: name: value array: @@ -54,6 +58,7 @@ classes: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -77,19 +82,25 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value. range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true tree_root: true ImagingRetinotopy: name: ImagingRetinotopy @@ -223,3 +234,4 @@ classes: name: focal_depth description: Focal depth offset, in meters. range: float32 + required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.base.yaml index bf4ee39..771c828 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.base.yaml @@ -38,10 +38,12 @@ classes: name: resolution description: Pixel resolution of the image, in pixels per centimeter. range: float32 + required: false description: name: description description: Description of the image. range: text + required: false value: name: value range: numeric @@ -97,13 +99,17 @@ classes: description: name: description description: Description of the time series. + ifabsent: string(no description) range: text + required: false comments: name: comments description: Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + ifabsent: string(no comments) range: text + required: false data: name: data description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first @@ -188,20 +194,25 @@ classes: to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) range: float32 + required: false resolution: name: resolution description: Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + ifabsent: float(-1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text + required: true value: name: value range: AnyType @@ -240,10 +251,14 @@ classes: name: rate description: Sampling rate, in Hz. range: float32 + required: true unit: name: unit description: Unit of measurement for time, which is fixed to 'seconds'. + ifabsent: string(seconds) range: text + required: true + equals_string: seconds value: name: value range: float64 @@ -291,6 +306,7 @@ classes: name: description description: Description of this collection of images. range: text + required: true image: name: image description: Images stored in this collection. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.behavior.yaml index 11f271b..b95e9da 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.behavior.yaml @@ -61,7 +61,9 @@ classes: description: Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(meters) range: text + required: false value: name: value range: numeric diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.device.yaml index a436b52..2f26c6a 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.device.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.device.yaml @@ -28,8 +28,10 @@ classes: description: Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text. range: text + required: false manufacturer: name: manufacturer description: The name of the manufacturer of the device. range: text + required: false tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml index 83d97ad..6641150 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml @@ -295,12 +295,14 @@ classes: name: description description: Description of this electrode group. 
range: text + required: true location: name: location description: Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible. range: text + required: true position: name: position description: stereotaxic or common framework coordinates diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml index 3a250d5..1186898 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.file.yaml @@ -37,7 +37,10 @@ classes: name: nwb_version description: File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers. + ifabsent: string(2.2.2) range: text + required: true + equals_string: 2.2.2 file_create_date: name: file_create_date description: 'A record of the date the file was created and of subsequent @@ -443,6 +446,7 @@ classes: name: file_name description: Name of script file. range: text + required: true value: name: value range: text diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.icephys.yaml index 756714c..a9c82e9 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.icephys.yaml @@ -29,10 +29,12 @@ classes: name: stimulus_description description: Protocol/stimulus name for this patch-clamp dataset. range: text + required: true sweep_number: name: sweep_number description: Sweep number, allows to group different PatchClampSeries together. range: uint32 + required: false data: name: data description: Recorded voltage or current. @@ -74,6 +76,7 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text + required: true value: name: value array: @@ -131,7 +134,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType @@ -198,7 +204,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -278,7 +287,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -296,7 +308,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. 
+ ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -314,7 +329,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -333,7 +351,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'. + ifabsent: string(hertz) range: text + required: true + equals_string: hertz value: name: value range: float32 @@ -352,7 +373,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_correction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -371,7 +395,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -390,7 +417,10 @@ classes: name: unit description: Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -409,7 +439,10 @@ classes: name: unit description: Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'. + ifabsent: string(ohms) range: text + required: true + equals_string: ohms value: name: value range: float32 @@ -445,7 +478,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.image.yaml index 7cb22bc..1e11ca4 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.image.yaml @@ -132,6 +132,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 + required: true + multivalued: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.misc.yaml index 79c8173..de3d137 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.misc.yaml @@ -72,7 +72,9 @@ classes: description: Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". + ifabsent: string(see 'feature_units') range: text + required: false value: name: value range: numeric @@ -186,7 +188,9 @@ classes: description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. 
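Many of the icephys slots above pair `ifabsent: string(...)` with `required: true` and `equals_string`, pinning an attribute to a single value. A hedged sketch of how such a slot could surface as a generated pydantic field (illustrative only; the class and field layout are assumptions, not this generator's actual output):

```python
from typing import Literal

from pydantic import BaseModel, Field


class CapacitanceCompensationSketch(BaseModel):
    """Illustrative model for a slot declared with
    ifabsent: string(farads), required: true, equals_string: farads."""

    # equals_string narrows the range to one literal; ifabsent supplies
    # the default, so the field accepts only the pinned value.
    unit: Literal["farads"] = Field(
        "farads", description="Unit of measurement, fixed to 'farads'."
    )
```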
+ ifabsent: string(no unit) range: text + required: true value: name: value array: @@ -384,3 +388,4 @@ classes: if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples. range: float64 + required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ophys.yaml index 727c05e..a5923c9 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ophys.yaml @@ -29,12 +29,14 @@ classes: name: pmt_gain description: Photomultiplier gain. range: float32 + required: false scan_line_rate: name: scan_line_rate description: Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. range: float32 + required: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. @@ -277,12 +279,16 @@ classes: x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000. + ifabsent: float(1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default value is 'meters'. + ifabsent: string(meters) range: text + required: false value: name: value range: float32 @@ -315,7 +321,9 @@ classes: unit: name: unit description: Measurement units for origin_coords. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value array: @@ -340,7 +348,9 @@ classes: unit: name: unit description: Measurement units for grid_spacing. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.retinotopy.yaml index 2e53599..5248fa1 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.retinotopy.yaml @@ -106,14 +106,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -137,14 +142,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -167,14 +177,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. 
range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -198,14 +213,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -229,23 +249,30 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value. range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true focal_depth: name: focal_depth description: Focal depth offset, in meters. range: float32 + required: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true value: name: value array: @@ -269,10 +296,14 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true value: name: value array: @@ -296,19 +327,25 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.base.yaml index 8e458a3..6a09101 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.base.yaml @@ -38,10 +38,12 @@ classes: name: resolution description: Pixel resolution of the image, in pixels per centimeter. range: float32 + required: false description: name: description description: Description of the image. range: text + required: false value: name: value range: numeric @@ -97,13 +99,17 @@ classes: description: name: description description: Description of the time series. + ifabsent: string(no description) range: text + required: false comments: name: comments description: Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. 
+ ifabsent: string(no comments) range: text + required: false data: name: data description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first @@ -188,20 +194,25 @@ classes: to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) range: float32 + required: false resolution: name: resolution description: Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + ifabsent: float(-1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text + required: true value: name: value range: AnyType @@ -240,10 +251,14 @@ classes: name: rate description: Sampling rate, in Hz. range: float32 + required: true unit: name: unit description: Unit of measurement for time, which is fixed to 'seconds'. + ifabsent: string(seconds) range: text + required: true + equals_string: seconds value: name: value range: float64 @@ -291,6 +306,7 @@ classes: name: description description: Description of this collection of images. range: text + required: true image: name: image description: Images stored in this collection. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.behavior.yaml index e6ae7ad..836b4eb 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.behavior.yaml @@ -61,7 +61,9 @@ classes: description: Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(meters) range: text + required: false value: name: value range: numeric diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.device.yaml index e8f28d6..62fc686 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.device.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.device.yaml @@ -28,8 +28,10 @@ classes: description: Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text. range: text + required: false manufacturer: name: manufacturer description: The name of the manufacturer of the device. range: text + required: false tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml index 8195921..8acdc93 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml @@ -295,12 +295,14 @@ classes: name: description description: Description of this electrode group. range: text + required: true location: name: location description: Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible. range: text + required: true position: name: position description: stereotaxic or common framework coordinates diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml index 1e98c12..45add51 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.file.yaml @@ -34,6 +34,7 @@ classes: name: notes description: Any notes the user has about the dataset being stored range: text + required: true tree_root: true NWBFile: name: NWBFile @@ -51,7 +52,10 @@ classes: name: nwb_version description: File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers. + ifabsent: string(2.2.4) range: text + required: true + equals_string: 2.2.4 file_create_date: name: file_create_date description: 'A record of the date the file was created and of subsequent @@ -457,6 +461,7 @@ classes: name: file_name description: Name of script file. range: text + required: true value: name: value range: text diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.icephys.yaml index fc1459d..200456d 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.icephys.yaml @@ -29,10 +29,12 @@ classes: name: stimulus_description description: Protocol/stimulus name for this patch-clamp dataset. range: text + required: true sweep_number: name: sweep_number description: Sweep number, allows to group different PatchClampSeries together. range: uint32 + required: false data: name: data description: Recorded voltage or current. @@ -74,6 +76,7 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text + required: true value: name: value array: @@ -131,7 +134,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType @@ -198,7 +204,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -278,7 +287,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -296,7 +308,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. 
+ ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -314,7 +329,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -333,7 +351,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'. + ifabsent: string(hertz) range: text + required: true + equals_string: hertz value: name: value range: float32 @@ -352,7 +373,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_correction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -371,7 +395,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -390,7 +417,10 @@ classes: name: unit description: Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -409,7 +439,10 @@ classes: name: unit description: Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'. + ifabsent: string(ohms) range: text + required: true + equals_string: ohms value: name: value range: float32 @@ -445,7 +478,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.image.yaml index 38a564e..4beec01 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.image.yaml @@ -132,6 +132,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 + required: true + multivalued: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.misc.yaml index 62fda1f..82d4a9f 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.misc.yaml @@ -72,7 +72,9 @@ classes: description: Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". + ifabsent: string(see 'feature_units') range: text + required: false value: name: value range: numeric @@ -186,7 +188,9 @@ classes: description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. 
+ ifabsent: string(no unit) range: text + required: true value: name: value array: @@ -384,3 +388,4 @@ classes: if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples. range: float64 + required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml index c02f6d8..73f7c6f 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml @@ -29,12 +29,14 @@ classes: name: pmt_gain description: Photomultiplier gain. range: float32 + required: false scan_line_rate: name: scan_line_rate description: Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. range: float32 + required: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. @@ -436,12 +438,16 @@ classes: x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000. + ifabsent: float(1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default value is 'meters'. + ifabsent: string(meters) range: text + required: false value: name: value range: float32 @@ -474,7 +480,9 @@ classes: unit: name: unit description: Measurement units for origin_coords. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value array: @@ -499,7 +507,9 @@ classes: unit: name: unit description: Measurement units for grid_spacing. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.retinotopy.yaml index 7cf81d7..5869564 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.retinotopy.yaml @@ -106,14 +106,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -137,14 +142,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -167,14 +177,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. 
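Several retinotopy slots above declare both `required: true` and `multivalued: true`; in a pydantic rendering that combination maps naturally onto a mandatory list-valued field. A minimal sketch, borrowing the `dimension` and `field_of_view` slot names (the class name is hypothetical):

```python
from typing import List

from pydantic import BaseModel, Field


class AxisDescriptorSketch(BaseModel):
    """Illustrative rendering of slots with required: true and
    multivalued: true, e.g. 'dimension' and 'field_of_view'."""

    # required + multivalued: a mandatory list rather than a scalar.
    dimension: List[int] = Field(
        ..., description="Number of rows and columns in the image."
    )
    field_of_view: List[float] = Field(
        ..., description="Size of viewing area, in meters."
    )
```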
range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -198,14 +213,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -229,23 +249,30 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value. range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true focal_depth: name: focal_depth description: Focal depth offset, in meters. range: float32 + required: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true value: name: value array: @@ -269,10 +296,14 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true value: name: value array: @@ -296,19 +327,25 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.base.yaml index e3161a4..477b107 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.base.yaml @@ -38,10 +38,12 @@ classes: name: resolution description: Pixel resolution of the image, in pixels per centimeter. range: float32 + required: false description: name: description description: Description of the image. range: text + required: false value: name: value range: numeric @@ -97,13 +99,17 @@ classes: description: name: description description: Description of the time series. + ifabsent: string(no description) range: text + required: false comments: name: comments description: Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. 
+ ifabsent: string(no comments) range: text + required: false data: name: data description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first @@ -188,20 +194,25 @@ classes: to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) range: float32 + required: false resolution: name: resolution description: Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + ifabsent: float(-1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text + required: true value: name: value range: AnyType @@ -240,10 +251,14 @@ classes: name: rate description: Sampling rate, in Hz. range: float32 + required: true unit: name: unit description: Unit of measurement for time, which is fixed to 'seconds'. + ifabsent: string(seconds) range: text + required: true + equals_string: seconds value: name: value range: float64 @@ -291,6 +306,7 @@ classes: name: description description: Description of this collection of images. range: text + required: true image: name: image description: Images stored in this collection. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.behavior.yaml index 65f31ca..97c06d8 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.behavior.yaml @@ -61,7 +61,9 @@ classes: description: Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(meters) range: text + required: false value: name: value range: numeric diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.device.yaml index 32b7d65..9572c3b 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.device.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.device.yaml @@ -28,8 +28,10 @@ classes: description: Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text. range: text + required: false manufacturer: name: manufacturer description: The name of the manufacturer of the device. range: text + required: false tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml index 9d008a1..0e11ce2 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml @@ -295,12 +295,14 @@ classes: name: description description: Description of this electrode group. range: text + required: true location: name: location description: Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible. range: text + required: true position: name: position description: stereotaxic or common framework coordinates diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml index efe69a8..f52421b 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.file.yaml @@ -34,6 +34,7 @@ classes: name: notes description: Any notes the user has about the dataset being stored range: text + required: true tree_root: true NWBFile: name: NWBFile @@ -51,7 +52,10 @@ classes: name: nwb_version description: File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers. + ifabsent: string(2.2.5) range: text + required: true + equals_string: 2.2.5 file_create_date: name: file_create_date description: 'A record of the date the file was created and of subsequent @@ -457,6 +461,7 @@ classes: name: file_name description: Name of script file. range: text + required: true value: name: value range: text diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.icephys.yaml index 4aa8c39..9eb505a 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.icephys.yaml @@ -29,10 +29,12 @@ classes: name: stimulus_description description: Protocol/stimulus name for this patch-clamp dataset. range: text + required: true sweep_number: name: sweep_number description: Sweep number, allows to group different PatchClampSeries together. range: uint32 + required: false data: name: data description: Recorded voltage or current. @@ -74,6 +76,7 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text + required: true value: name: value array: @@ -131,7 +134,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType @@ -198,7 +204,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -278,7 +287,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -296,7 +308,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. 
+ ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -314,7 +329,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -333,7 +351,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'. + ifabsent: string(hertz) range: text + required: true + equals_string: hertz value: name: value range: float32 @@ -352,7 +373,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_correction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -371,7 +395,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -390,7 +417,10 @@ classes: name: unit description: Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -409,7 +439,10 @@ classes: name: unit description: Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'. + ifabsent: string(ohms) range: text + required: true + equals_string: ohms value: name: value range: float32 @@ -445,7 +478,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.image.yaml index 6d564d7..4218d3b 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.image.yaml @@ -132,6 +132,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 + required: true + multivalued: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.misc.yaml index bbfbc48..f163348 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.misc.yaml @@ -72,7 +72,9 @@ classes: description: Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". + ifabsent: string(see 'feature_units') range: text + required: false value: name: value range: numeric @@ -186,7 +188,9 @@ classes: description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. 
+ ifabsent: string(no unit) range: text + required: true value: name: value array: @@ -384,3 +388,4 @@ classes: if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples. range: float64 + required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml index 6218538..6613637 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml @@ -29,12 +29,14 @@ classes: name: pmt_gain description: Photomultiplier gain. range: float32 + required: false scan_line_rate: name: scan_line_rate description: Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. range: float32 + required: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. @@ -436,12 +438,16 @@ classes: x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000. + ifabsent: float(1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default value is 'meters'. + ifabsent: string(meters) range: text + required: false value: name: value range: float32 @@ -474,7 +480,9 @@ classes: unit: name: unit description: Measurement units for origin_coords. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value range: float32 @@ -502,7 +510,9 @@ classes: unit: name: unit description: Measurement units for grid_spacing. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value range: float32 diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.retinotopy.yaml index 4fe5360..457b1cc 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.retinotopy.yaml @@ -106,14 +106,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -137,14 +142,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -167,14 +177,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' 
range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -198,14 +213,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -229,23 +249,30 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value. range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true focal_depth: name: focal_depth description: Focal depth offset, in meters. range: float32 + required: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true value: name: value array: @@ -269,10 +296,14 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true value: name: value array: @@ -296,19 +327,25 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.base.yaml index 8249ab1..ab7eabf 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.base.yaml @@ -39,10 +39,12 @@ classes: name: resolution description: Pixel resolution of the image, in pixels per centimeter. range: float32 + required: false description: name: description description: Description of the image. range: text + required: false value: name: value range: numeric @@ -98,13 +100,17 @@ classes: description: name: description description: Description of the time series. + ifabsent: string(no description) range: text + required: false comments: name: comments description: Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + ifabsent: string(no comments) range: text + required: false data: name: data description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first @@ -189,20 +195,25 @@ classes: to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) range: float32 + required: false resolution: name: resolution description: Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + ifabsent: float(-1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text + required: true continuity: name: continuity description: Optionally describe the continuity of the data. Can be "continuous", @@ -215,6 +226,7 @@ classes: the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. range: text + required: false value: name: value range: AnyType @@ -253,10 +265,14 @@ classes: name: rate description: Sampling rate, in Hz. range: float32 + required: true unit: name: unit description: Unit of measurement for time, which is fixed to 'seconds'. + ifabsent: string(seconds) range: text + required: true + equals_string: seconds value: name: value range: float64 @@ -304,6 +320,7 @@ classes: name: description description: Description of this collection of images. range: text + required: true image: name: image description: Images stored in this collection. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml index 755fecd..07bb957 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.behavior.yaml @@ -61,7 +61,9 @@ classes: description: Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(meters) range: text + required: false value: name: value range: numeric diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.device.yaml index c0f4e17..7881fcf 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.device.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.device.yaml @@ -28,8 +28,10 @@ classes: description: Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text. range: text + required: false manufacturer: name: manufacturer description: The name of the manufacturer of the device. 
range: text + required: false tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml index 4b9767f..12ad322 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml @@ -37,6 +37,7 @@ classes: at 300 Hz". If a non-standard filter type is used, provide as much detail about the filter properties as possible. range: text + required: false data: name: data description: Recorded voltage data. @@ -305,12 +306,14 @@ classes: name: description description: Description of this electrode group. range: text + required: true location: name: location description: Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible. range: text + required: true position: name: position description: stereotaxic or common framework coordinates diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml index 75dc8f9..0b76f4f 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.file.yaml @@ -34,6 +34,7 @@ classes: name: notes description: Any notes the user has about the dataset being stored range: text + required: true tree_root: true NWBFile: name: NWBFile @@ -51,7 +52,10 @@ classes: name: nwb_version description: File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers. + ifabsent: string(2.3.0) range: text + required: true + equals_string: 2.3.0 file_create_date: name: file_create_date description: 'A record of the date the file was created and of subsequent @@ -457,6 +461,7 @@ classes: name: file_name description: Name of script file. range: text + required: true value: name: value range: text diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml index 9e14aec..d93bb52 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.icephys.yaml @@ -29,10 +29,12 @@ classes: name: stimulus_description description: Protocol/stimulus name for this patch-clamp dataset. range: text + required: true sweep_number: name: sweep_number description: Sweep number, allows to group different PatchClampSeries together. range: uint32 + required: false data: name: data description: Recorded voltage or current. @@ -74,6 +76,7 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text + required: true value: name: value array: @@ -131,7 +134,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. 
+ ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType @@ -152,7 +158,10 @@ classes: name: stimulus_description description: An IZeroClampSeries has no stimulus, so this attribute is automatically set to "N/A" + ifabsent: string(N/A) range: text + required: true + equals_string: N/A bias_current: name: bias_current description: Bias current, in amps, fixed to 0.0. @@ -203,7 +212,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -283,7 +295,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -301,7 +316,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -319,7 +337,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -338,7 +359,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'. + ifabsent: string(hertz) range: text + required: true + equals_string: hertz value: name: value range: float32 @@ -357,7 +381,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_correction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -376,7 +403,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -395,7 +425,10 @@ classes: name: unit description: Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -414,7 +447,10 @@ classes: name: unit description: Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'. + ifabsent: string(ohms) range: text + required: true + equals_string: ohms value: name: value range: float32 @@ -450,7 +486,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. 
+ ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.image.yaml index 0b93830..bbbcfce 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.image.yaml @@ -144,6 +144,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 + required: true + multivalued: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.misc.yaml index e1d24e2..89d5ee0 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.misc.yaml @@ -72,7 +72,9 @@ classes: description: Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". + ifabsent: string(see 'feature_units') range: text + required: false value: name: value range: numeric @@ -200,7 +202,9 @@ classes: description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(no unit) range: text + required: true value: name: value array: @@ -458,3 +462,4 @@ classes: if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples. range: float64 + required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml index 15cb79a..9e86441 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml @@ -29,12 +29,14 @@ classes: name: pmt_gain description: Photomultiplier gain. range: float32 + required: false scan_line_rate: name: scan_line_rate description: Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. range: float32 + required: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. @@ -436,12 +438,16 @@ classes: x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000. + ifabsent: float(1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default value is 'meters'. + ifabsent: string(meters) range: text + required: false value: name: value range: float32 @@ -474,7 +480,9 @@ classes: unit: name: unit description: Measurement units for origin_coords. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value range: float32 @@ -502,7 +510,9 @@ classes: unit: name: unit description: Measurement units for grid_spacing. The default value is 'meters'. 
+ ifabsent: string(meters) range: text + required: true value: name: value range: float32 diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml index 98473cb..cc06e90 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.retinotopy.yaml @@ -106,14 +106,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -137,14 +142,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -167,14 +177,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -198,14 +213,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -229,23 +249,30 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value. range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true focal_depth: name: focal_depth description: Focal depth offset, in meters. range: float32 + required: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true value: name: value array: @@ -269,10 +296,14 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true value: name: value array: @@ -296,19 +327,25 @@ classes: description: Number of bits used to represent each value. 
This is necessary to determine maximum (white) pixel value range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml index 77f05be..8ba1ca7 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml @@ -73,10 +73,12 @@ classes: name: resolution description: Pixel resolution of the image, in pixels per centimeter. range: float32 + required: false description: name: description description: Description of the image. range: text + required: false value: name: value range: numeric @@ -132,13 +134,17 @@ classes: description: name: description description: Description of the time series. + ifabsent: string(no description) range: text + required: false comments: name: comments description: Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + ifabsent: string(no comments) range: text + required: false data: name: data description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first @@ -223,20 +229,25 @@ classes: to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) range: float32 + required: false resolution: name: resolution description: Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + ifabsent: float(-1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text + required: true continuity: name: continuity description: Optionally describe the continuity of the data. Can be "continuous", @@ -249,6 +260,7 @@ classes: the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. range: text + required: false value: name: value range: AnyType @@ -287,10 +299,14 @@ classes: name: rate description: Sampling rate, in Hz. range: float32 + required: true unit: name: unit description: Unit of measurement for time, which is fixed to 'seconds'. + ifabsent: string(seconds) range: text + required: true + equals_string: seconds value: name: value range: float64 @@ -338,6 +354,7 @@ classes: name: description description: Description of this collection of images. range: text + required: true image: name: image description: Images stored in this collection. 
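
[Reviewer note, not part of the patch] The hunks above and below repeatedly add the same fixed-value pattern to attribute slots: `ifabsent: string(...)` supplies a default, `required: true` makes the slot mandatory, and `equals_string` pins it to a single allowed value. Below is a minimal sketch of how such a slot can be expressed as a Pydantic field; the class name is borrowed from the schema purely for illustration and is not claimed to be the generator's actual output.

```python
from typing import Literal

from pydantic import BaseModel, ValidationError


class VoltageClampSeriesData(BaseModel):
    """Illustration only: a slot with ifabsent: string(volts) and equals_string: volts."""

    # equals_string pins the permitted value; ifabsent supplies the default
    unit: Literal["volts"] = "volts"


data = VoltageClampSeriesData()          # default applied when the attribute is absent
assert data.unit == "volts"

try:
    VoltageClampSeriesData(unit="amps")  # any other value is rejected at validation time
except ValidationError:
    pass
```

Expressed this way, the fixed attribute still constructs with no arguments (the `ifabsent` default), while validation rejects any value other than the `equals_string` constant.
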
diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml index 1ed3fa3..322a4d9 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.behavior.yaml @@ -61,7 +61,9 @@ classes: description: Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(meters) range: text + required: false value: name: value range: numeric diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.device.yaml index fda15e6..fc320af 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.device.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.device.yaml @@ -28,8 +28,10 @@ classes: description: Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text. range: text + required: false manufacturer: name: manufacturer description: The name of the manufacturer of the device. range: text + required: false tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml index 43a8eb9..e572bd1 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml @@ -37,6 +37,7 @@ classes: at 300 Hz". If a non-standard filter type is used, provide as much detail about the filter properties as possible. range: text + required: false data: name: data description: Recorded voltage data. @@ -305,12 +306,14 @@ classes: name: description description: Description of this electrode group. range: text + required: true location: name: location description: Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible. range: text + required: true position: name: position description: stereotaxic or common framework coordinates diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml index a1bd2ec..13bf8a1 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.file.yaml @@ -34,6 +34,7 @@ classes: name: notes description: Any notes the user has about the dataset being stored range: text + required: true tree_root: true NWBFile: name: NWBFile @@ -51,7 +52,10 @@ classes: name: nwb_version description: File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers. + ifabsent: string(2.4.0) range: text + required: true + equals_string: 2.4.0 file_create_date: name: file_create_date description: 'A record of the date the file was created and of subsequent @@ -457,6 +461,7 @@ classes: name: file_name description: Name of script file. 
range: text + required: true value: name: value range: text diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml index 89b6b6d..346751e 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.icephys.yaml @@ -29,10 +29,12 @@ classes: name: stimulus_description description: Protocol/stimulus name for this patch-clamp dataset. range: text + required: true sweep_number: name: sweep_number description: Sweep number, allows to group different PatchClampSeries together. range: uint32 + required: false data: name: data description: Recorded voltage or current. @@ -74,6 +76,7 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. range: text + required: true value: name: value array: @@ -131,7 +134,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType @@ -152,7 +158,10 @@ classes: name: stimulus_description description: An IZeroClampSeries has no stimulus, so this attribute is automatically set to "N/A" + ifabsent: string(N/A) range: text + required: true + equals_string: N/A bias_current: name: bias_current description: Bias current, in amps, fixed to 0.0. @@ -203,7 +212,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -283,7 +295,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -301,7 +316,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -319,7 +337,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -338,7 +359,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'. + ifabsent: string(hertz) range: text + required: true + equals_string: hertz value: name: value range: float32 @@ -357,7 +381,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_correction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -376,7 +403,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'. 
+ ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -395,7 +425,10 @@ classes: name: unit description: Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -414,7 +447,10 @@ classes: name: unit description: Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'. + ifabsent: string(ohms) range: text + required: true + equals_string: ohms value: name: value range: float32 @@ -450,7 +486,10 @@ classes: description: Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType @@ -573,7 +612,10 @@ classes: description: name: description description: Description of what is in this dynamic table. + ifabsent: string(Table for storing intracellular electrode related metadata.) range: text + required: true + equals_string: Table for storing intracellular electrode related metadata. electrode: name: electrode description: Column for storing the reference to the intracellular electrode. @@ -593,7 +635,10 @@ classes: description: name: description description: Description of what is in this dynamic table. + ifabsent: string(Table for storing intracellular stimulus related metadata.) range: text + required: true + equals_string: Table for storing intracellular stimulus related metadata. stimulus: name: stimulus annotations: @@ -621,7 +666,10 @@ classes: description: name: description description: Description of what is in this dynamic table. + ifabsent: string(Table for storing intracellular response related metadata.) range: text + required: true + equals_string: Table for storing intracellular response related metadata. response: name: response annotations: @@ -662,7 +710,14 @@ classes: name: description description: Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute. + ifabsent: string(A table to group together a stimulus and response from a + single electrode and a single simultaneous recording and for storing metadata + about the intracellular recording.) range: text + required: true + equals_string: A table to group together a stimulus and response from a single + electrode and a single simultaneous recording and for storing metadata about + the intracellular recording. electrodes: name: electrodes description: Table for storing intracellular electrode related metadata. @@ -734,6 +789,7 @@ classes: table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: IntracellularRecordingsTable + required: true SequentialRecordingsTable: name: SequentialRecordingsTable description: A table for grouping different sequential recordings from the SimultaneousRecordingsTable @@ -796,6 +852,7 @@ classes: table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. 
range: SimultaneousRecordingsTable + required: true RepetitionsTable: name: RepetitionsTable description: A table for grouping different sequential intracellular recordings @@ -849,6 +906,7 @@ classes: region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: SequentialRecordingsTable + required: true ExperimentalConditionsTable: name: ExperimentalConditionsTable description: A table for grouping different intracellular recording repetitions @@ -898,3 +956,4 @@ classes: applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: RepetitionsTable + required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.image.yaml index dd88ab9..ac28a30 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.image.yaml @@ -145,6 +145,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 + required: true + multivalued: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.misc.yaml index d416ce7..97927d6 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.misc.yaml @@ -72,7 +72,9 @@ classes: description: Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". + ifabsent: string(see 'feature_units') range: text + required: false value: name: value range: numeric @@ -200,7 +202,9 @@ classes: description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(no unit) range: text + required: true value: name: value array: @@ -458,3 +462,4 @@ classes: if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples. range: float64 + required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml index 0e56b23..f59ef09 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml @@ -29,12 +29,14 @@ classes: name: pmt_gain description: Photomultiplier gain. range: float32 + required: false scan_line_rate: name: scan_line_rate description: Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. range: float32 + required: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. @@ -436,12 +438,16 @@ classes: x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000. 
+ ifabsent: float(1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default value is 'meters'. + ifabsent: string(meters) range: text + required: false value: name: value range: float32 @@ -474,7 +480,9 @@ classes: unit: name: unit description: Measurement units for origin_coords. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value range: float32 @@ -502,7 +510,9 @@ classes: unit: name: unit description: Measurement units for grid_spacing. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value range: float32 diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml index d6318e1..f433f10 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.retinotopy.yaml @@ -106,14 +106,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -137,14 +142,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -167,14 +177,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -198,14 +213,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -229,23 +249,30 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value. range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true focal_depth: name: focal_depth description: Focal depth offset, in meters. 
range: float32 + required: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true value: name: value array: @@ -269,10 +296,14 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true value: name: value array: @@ -296,19 +327,25 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml index 4204fe9..1a9d3a8 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml @@ -73,10 +73,12 @@ classes: name: resolution description: Pixel resolution of the image, in pixels per centimeter. range: float32 + required: false description: name: description description: Description of the image. range: text + required: false value: name: value range: numeric @@ -152,13 +154,17 @@ classes: description: name: description description: Description of the time series. + ifabsent: string(no description) range: text + required: false comments: name: comments description: Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + ifabsent: string(no comments) range: text + required: false data: name: data description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first @@ -243,7 +249,9 @@ classes: to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) range: float32 + required: false offset: name: offset description: Scalar to add to the data after scaling by 'conversion' to finalize @@ -252,19 +260,23 @@ classes: to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. range: float32 + required: false resolution: name: resolution description: Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + ifabsent: float(-1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. 
range: text + required: true continuity: name: continuity description: Optionally describe the continuity of the data. Can be "continuous", @@ -277,6 +289,7 @@ classes: the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. range: text + required: false value: name: value range: AnyType @@ -315,10 +328,14 @@ classes: name: rate description: Sampling rate, in Hz. range: float32 + required: true unit: name: unit description: Unit of measurement for time, which is fixed to 'seconds'. + ifabsent: string(seconds) range: text + required: true + equals_string: seconds value: name: value range: float64 @@ -368,6 +385,7 @@ classes: name: description description: Description of this collection of images. range: text + required: true image: name: image description: Images stored in this collection. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml index 8a994a2..e1d735b 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.behavior.yaml @@ -62,7 +62,9 @@ classes: value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + ifabsent: string(meters) range: text + required: false value: name: value range: numeric diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.device.yaml index 64b6e98..3f1acc9 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.device.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.device.yaml @@ -28,8 +28,10 @@ classes: description: Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text. range: text + required: false manufacturer: name: manufacturer description: The name of the manufacturer of the device. range: text + required: false tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml index bb13169..1562caf 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml @@ -37,6 +37,7 @@ classes: at 300 Hz". If a non-standard filter type is used, provide as much detail about the filter properties as possible. range: text + required: false data: name: data description: Recorded voltage data. @@ -305,12 +306,14 @@ classes: name: description description: Description of this electrode group. range: text + required: true location: name: location description: Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible. 
range: text + required: true position: name: position description: stereotaxic or common framework coordinates diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml index eb94ee7..f468049 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.file.yaml @@ -34,6 +34,7 @@ classes: name: notes description: Any notes the user has about the dataset being stored range: text + required: true tree_root: true NWBFile: name: NWBFile @@ -51,7 +52,10 @@ classes: name: nwb_version description: File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers. + ifabsent: string(2.5.0) range: text + required: true + equals_string: 2.5.0 file_create_date: name: file_create_date description: 'A record of the date the file was created and of subsequent @@ -458,6 +462,7 @@ classes: name: file_name description: Name of script file. range: text + required: true value: name: value range: text diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml index de10ce5..bdd9dd5 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.icephys.yaml @@ -29,10 +29,12 @@ classes: name: stimulus_description description: Protocol/stimulus name for this patch-clamp dataset. range: text + required: true sweep_number: name: sweep_number description: Sweep number, allows to group different PatchClampSeries together. range: uint32 + required: false data: name: data description: Recorded voltage or current. @@ -74,6 +76,7 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. range: text + required: true value: name: value array: @@ -132,7 +135,10 @@ classes: fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType @@ -153,7 +159,10 @@ classes: name: stimulus_description description: An IZeroClampSeries has no stimulus, so this attribute is automatically set to "N/A" + ifabsent: string(N/A) range: text + required: true + equals_string: N/A bias_current: name: bias_current description: Bias current, in amps, fixed to 0.0. @@ -205,7 +214,10 @@ classes: fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -286,7 +298,10 @@ classes: fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -304,7 +319,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. 
+ ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -322,7 +340,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -341,7 +362,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'. + ifabsent: string(hertz) range: text + required: true + equals_string: hertz value: name: value range: float32 @@ -360,7 +384,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_correction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -379,7 +406,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -398,7 +428,10 @@ classes: name: unit description: Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -417,7 +450,10 @@ classes: name: unit description: Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'. + ifabsent: string(ohms) range: text + required: true + equals_string: ohms value: name: value range: float32 @@ -454,7 +490,10 @@ classes: fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType @@ -583,7 +622,10 @@ classes: description: name: description description: Description of what is in this dynamic table. + ifabsent: string(Table for storing intracellular electrode related metadata.) range: text + required: true + equals_string: Table for storing intracellular electrode related metadata. electrode: name: electrode description: Column for storing the reference to the intracellular electrode. @@ -603,7 +645,10 @@ classes: description: name: description description: Description of what is in this dynamic table. + ifabsent: string(Table for storing intracellular stimulus related metadata.) range: text + required: true + equals_string: Table for storing intracellular stimulus related metadata. stimulus: name: stimulus annotations: @@ -631,7 +676,10 @@ classes: description: name: description description: Description of what is in this dynamic table. + ifabsent: string(Table for storing intracellular response related metadata.) range: text + required: true + equals_string: Table for storing intracellular response related metadata. response: name: response annotations: @@ -672,7 +720,14 @@ classes: name: description description: Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute. + ifabsent: string(A table to group together a stimulus and response from a + single electrode and a single simultaneous recording and for storing metadata + about the intracellular recording.) 
range: text + required: true + equals_string: A table to group together a stimulus and response from a single + electrode and a single simultaneous recording and for storing metadata about + the intracellular recording. electrodes: name: electrodes description: Table for storing intracellular electrode related metadata. @@ -744,6 +799,7 @@ classes: table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: IntracellularRecordingsTable + required: true SequentialRecordingsTable: name: SequentialRecordingsTable description: A table for grouping different sequential recordings from the SimultaneousRecordingsTable @@ -806,6 +862,7 @@ classes: table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: SimultaneousRecordingsTable + required: true RepetitionsTable: name: RepetitionsTable description: A table for grouping different sequential intracellular recordings @@ -859,6 +916,7 @@ classes: region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: SequentialRecordingsTable + required: true ExperimentalConditionsTable: name: ExperimentalConditionsTable description: A table for grouping different intracellular recording repetitions @@ -908,3 +966,4 @@ classes: applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: RepetitionsTable + required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.image.yaml index c8a9988..0f6efd9 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.image.yaml @@ -145,6 +145,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 + required: true + multivalued: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.misc.yaml index e7204ff..f663994 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.misc.yaml @@ -72,7 +72,9 @@ classes: description: Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". + ifabsent: string(see 'feature_units') range: text + required: false value: name: value range: numeric @@ -200,7 +202,9 @@ classes: description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(no unit) range: text + required: true value: name: value array: @@ -458,3 +462,4 @@ classes: if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples. 
range: float64 + required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml index 80df95c..2c94d34 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml @@ -29,12 +29,14 @@ classes: name: pmt_gain description: Photomultiplier gain. range: float32 + required: false scan_line_rate: name: scan_line_rate description: Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. range: float32 + required: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. @@ -436,12 +438,16 @@ classes: x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000. + ifabsent: float(1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default value is 'meters'. + ifabsent: string(meters) range: text + required: false value: name: value range: float32 @@ -474,7 +480,9 @@ classes: unit: name: unit description: Measurement units for origin_coords. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value range: float32 @@ -502,7 +510,9 @@ classes: unit: name: unit description: Measurement units for grid_spacing. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value range: float32 diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml index 31a6dd3..3a624b1 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.retinotopy.yaml @@ -106,14 +106,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -137,14 +142,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -167,14 +177,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). 
range: text + required: true value: name: value array: @@ -198,14 +213,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -229,23 +249,30 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value. range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true focal_depth: name: focal_depth description: Focal depth offset, in meters. range: float32 + required: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true value: name: value array: @@ -269,10 +296,14 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true value: name: value array: @@ -296,19 +327,25 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml index 6439b29..ad3eeb6 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml @@ -73,10 +73,12 @@ classes: name: resolution description: Pixel resolution of the image, in pixels per centimeter. range: float32 + required: false description: name: description description: Description of the image. range: text + required: false value: name: value range: numeric @@ -152,13 +154,17 @@ classes: description: name: description description: Description of the time series. + ifabsent: string(no description) range: text + required: false comments: name: comments description: Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + ifabsent: string(no comments) range: text + required: false data: name: data description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. 
The first @@ -243,7 +249,9 @@ classes: to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) range: float32 + required: false offset: name: offset description: Scalar to add to the data after scaling by 'conversion' to finalize @@ -252,19 +260,23 @@ classes: to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. range: float32 + required: false resolution: name: resolution description: Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + ifabsent: float(-1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. range: text + required: true continuity: name: continuity description: Optionally describe the continuity of the data. Can be "continuous", @@ -277,6 +289,7 @@ classes: the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. range: text + required: false value: name: value range: AnyType @@ -315,10 +328,14 @@ classes: name: rate description: Sampling rate, in Hz. range: float32 + required: true unit: name: unit description: Unit of measurement for time, which is fixed to 'seconds'. + ifabsent: string(seconds) range: text + required: true + equals_string: seconds value: name: value range: float64 @@ -368,6 +385,7 @@ classes: name: description description: Description of this collection of images. range: text + required: true image: name: image description: Images stored in this collection. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml index 9174b6a..650a4cd 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.behavior.yaml @@ -62,7 +62,9 @@ classes: value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + ifabsent: string(meters) range: text + required: false value: name: value range: numeric diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.device.yaml index a01cb4b..4dd254b 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.device.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.device.yaml @@ -28,8 +28,10 @@ classes: description: Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text. range: text + required: false manufacturer: name: manufacturer description: The name of the manufacturer of the device. 
range: text + required: false tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml index 7145c03..3fab077 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml @@ -37,6 +37,7 @@ classes: at 300 Hz". If a non-standard filter type is used, provide as much detail about the filter properties as possible. range: text + required: false data: name: data description: Recorded voltage data. @@ -305,12 +306,14 @@ classes: name: description description: Description of this electrode group. range: text + required: true location: name: location description: Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible. range: text + required: true position: name: position description: stereotaxic or common framework coordinates diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml index 3ddb452..f5d5d49 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.file.yaml @@ -34,6 +34,7 @@ classes: name: notes description: Any notes the user has about the dataset being stored range: text + required: true tree_root: true NWBFile: name: NWBFile @@ -51,7 +52,10 @@ classes: name: nwb_version description: File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers. + ifabsent: string(2.6.0) range: text + required: true + equals_string: 2.6.0 file_create_date: name: file_create_date description: 'A record of the date the file was created and of subsequent @@ -458,6 +462,7 @@ classes: name: file_name description: Name of script file. range: text + required: true value: name: value range: text @@ -817,7 +822,9 @@ classes: name: reference description: Age is with reference to this event. Can be 'birth' or 'gestational'. If reference is omitted, 'birth' is implied. + ifabsent: string(birth) range: text + required: false value: name: value range: text diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml index c24417f..b3181bc 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.icephys.yaml @@ -29,10 +29,12 @@ classes: name: stimulus_description description: Protocol/stimulus name for this patch-clamp dataset. range: text + required: true sweep_number: name: sweep_number description: Sweep number, allows to group different PatchClampSeries together. range: uint32 + required: false data: name: data description: Recorded voltage or current. @@ -74,6 +76,7 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. range: text + required: true value: name: value array: @@ -132,7 +135,10 @@ classes: fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType @@ -153,7 +159,10 @@ classes: name: stimulus_description description: An IZeroClampSeries has no stimulus, so this attribute is automatically set to "N/A" + ifabsent: string(N/A) range: text + required: true + equals_string: N/A bias_current: name: bias_current description: Bias current, in amps, fixed to 0.0. @@ -205,7 +214,10 @@ classes: fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -286,7 +298,10 @@ classes: fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -304,7 +319,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -322,7 +340,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -341,7 +362,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'. + ifabsent: string(hertz) range: text + required: true + equals_string: hertz value: name: value range: float32 @@ -360,7 +384,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_correction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -379,7 +406,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -398,7 +428,10 @@ classes: name: unit description: Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -417,7 +450,10 @@ classes: name: unit description: Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'. + ifabsent: string(ohms) range: text + required: true + equals_string: ohms value: name: value range: float32 @@ -454,7 +490,10 @@ classes: fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType @@ -583,7 +622,10 @@ classes: description: name: description description: Description of what is in this dynamic table. + ifabsent: string(Table for storing intracellular electrode related metadata.) range: text + required: true + equals_string: Table for storing intracellular electrode related metadata. 
electrode: name: electrode description: Column for storing the reference to the intracellular electrode. @@ -603,7 +645,10 @@ classes: description: name: description description: Description of what is in this dynamic table. + ifabsent: string(Table for storing intracellular stimulus related metadata.) range: text + required: true + equals_string: Table for storing intracellular stimulus related metadata. stimulus: name: stimulus annotations: @@ -631,7 +676,10 @@ classes: description: name: description description: Description of what is in this dynamic table. + ifabsent: string(Table for storing intracellular response related metadata.) range: text + required: true + equals_string: Table for storing intracellular response related metadata. response: name: response annotations: @@ -672,7 +720,14 @@ classes: name: description description: Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute. + ifabsent: string(A table to group together a stimulus and response from a + single electrode and a single simultaneous recording and for storing metadata + about the intracellular recording.) range: text + required: true + equals_string: A table to group together a stimulus and response from a single + electrode and a single simultaneous recording and for storing metadata about + the intracellular recording. electrodes: name: electrodes description: Table for storing intracellular electrode related metadata. @@ -744,6 +799,7 @@ classes: table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: IntracellularRecordingsTable + required: true SequentialRecordingsTable: name: SequentialRecordingsTable description: A table for grouping different sequential recordings from the SimultaneousRecordingsTable @@ -806,6 +862,7 @@ classes: table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: SimultaneousRecordingsTable + required: true RepetitionsTable: name: RepetitionsTable description: A table for grouping different sequential intracellular recordings @@ -859,6 +916,7 @@ classes: region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: SequentialRecordingsTable + required: true ExperimentalConditionsTable: name: ExperimentalConditionsTable description: A table for grouping different intracellular recording repetitions @@ -908,3 +966,4 @@ classes: applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: RepetitionsTable + required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml index 4d3fcd0..45bd0a3 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.image.yaml @@ -145,6 +145,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. 
range: int32 + required: true + multivalued: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml index 07c142e..56f8824 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.misc.yaml @@ -72,7 +72,9 @@ classes: description: Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". + ifabsent: string(see 'feature_units') range: text + required: false value: name: value range: numeric @@ -200,7 +202,9 @@ classes: description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(no unit) range: text + required: true value: name: value array: @@ -458,3 +462,4 @@ classes: if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples. range: float64 + required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml index 877b532..00607b0 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml @@ -29,28 +29,34 @@ classes: name: pmt_gain description: Photomultiplier gain. range: float32 + required: false scan_line_rate: name: scan_line_rate description: Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. range: float32 + required: false exposure_time: name: exposure_time description: Exposure time of the sample; often the inverse of the frequency. range: float32 + required: false binning: name: binning description: Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc. range: uint8 + required: false power: name: power description: Power of the excitation in mW, if known. range: float32 + required: false intensity: name: intensity description: Intensity of the excitation in mW/mm^2, if known. range: float32 + required: false imaging_plane: name: imaging_plane annotations: @@ -76,12 +82,14 @@ classes: name: pmt_gain description: Photomultiplier gain. range: float32 + required: false scan_line_rate: name: scan_line_rate description: Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. range: float32 + required: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. @@ -483,12 +491,16 @@ classes: x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000. + ifabsent: float(1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default value is 'meters'. 
+ ifabsent: string(meters) range: text + required: false value: name: value range: float32 @@ -521,7 +533,9 @@ classes: unit: name: unit description: Measurement units for origin_coords. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value range: float32 @@ -549,7 +563,9 @@ classes: unit: name: unit description: Measurement units for grid_spacing. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value range: float32 diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml index e913c0d..dc790f3 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.retinotopy.yaml @@ -106,14 +106,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -137,14 +142,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -167,14 +177,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -198,14 +213,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -229,23 +249,30 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value. range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true focal_depth: name: focal_depth description: Focal depth offset, in meters. range: float32 + required: true format: name: format description: Format of image. Right now only 'raw' is supported. 
range: text + required: true value: name: value array: @@ -269,10 +296,14 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true value: name: value array: @@ -296,19 +327,25 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml index 0bd5bd0..ec53dc1 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml @@ -73,10 +73,12 @@ classes: name: resolution description: Pixel resolution of the image, in pixels per centimeter. range: float32 + required: false description: name: description description: Description of the image. range: text + required: false value: name: value range: numeric @@ -152,13 +154,17 @@ classes: description: name: description description: Description of the time series. + ifabsent: string(no description) range: text + required: false comments: name: comments description: Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + ifabsent: string(no comments) range: text + required: false data: name: data description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first @@ -243,7 +249,9 @@ classes: to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + ifabsent: float(1.0) range: float32 + required: false offset: name: offset description: Scalar to add to the data after scaling by 'conversion' to finalize @@ -252,19 +260,23 @@ classes: to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. range: float32 + required: false resolution: name: resolution description: Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + ifabsent: float(-1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. range: text + required: true continuity: name: continuity description: Optionally describe the continuity of the data. 
Can be "continuous", @@ -277,6 +289,7 @@ classes: the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. range: text + required: false value: name: value range: AnyType @@ -315,10 +328,14 @@ classes: name: rate description: Sampling rate, in Hz. range: float32 + required: true unit: name: unit description: Unit of measurement for time, which is fixed to 'seconds'. + ifabsent: string(seconds) range: text + required: true + equals_string: seconds value: name: value range: float64 @@ -368,6 +385,7 @@ classes: name: description description: Description of this collection of images. range: text + required: true image: name: image description: Images stored in this collection. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml index eabccf6..0df664e 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml @@ -62,7 +62,9 @@ classes: value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + ifabsent: string(meters) range: text + required: false value: name: value range: numeric diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.device.yaml index 7719f95..ab2fc92 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.device.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.device.yaml @@ -28,8 +28,10 @@ classes: description: Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text. range: text + required: false manufacturer: name: manufacturer description: The name of the manufacturer of the device. range: text + required: false tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml index 378f644..8485a7d 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml @@ -37,6 +37,7 @@ classes: at 300 Hz". If a non-standard filter type is used, provide as much detail about the filter properties as possible. range: text + required: false data: name: data description: Recorded voltage data. @@ -305,12 +306,14 @@ classes: name: description description: Description of this electrode group. range: text + required: true location: name: location description: Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible. 
range: text + required: true position: name: position description: stereotaxic or common framework coordinates diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml index a0978ba..1b56d9d 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml @@ -34,6 +34,7 @@ classes: name: notes description: Any notes the user has about the dataset being stored range: text + required: true tree_root: true NWBFile: name: NWBFile @@ -51,7 +52,10 @@ classes: name: nwb_version description: File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers. + ifabsent: string(2.7.0) range: text + required: true + equals_string: 2.7.0 file_create_date: name: file_create_date description: 'A record of the date the file was created and of subsequent @@ -460,6 +464,7 @@ classes: name: file_name description: Name of script file. range: text + required: true value: name: value range: text @@ -819,7 +824,9 @@ classes: name: reference description: Age is with reference to this event. Can be 'birth' or 'gestational'. If reference is omitted, 'birth' is implied. + ifabsent: string(birth) range: text + required: false value: name: value range: text diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml index 3ad625d..710ba36 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml @@ -29,10 +29,12 @@ classes: name: stimulus_description description: Protocol/stimulus name for this patch-clamp dataset. range: text + required: true sweep_number: name: sweep_number description: Sweep number, allows to group different PatchClampSeries together. range: uint32 + required: false data: name: data description: Recorded voltage or current. @@ -74,6 +76,7 @@ classes: values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. range: text + required: true value: name: value array: @@ -132,7 +135,10 @@ classes: fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType @@ -153,7 +159,10 @@ classes: name: stimulus_description description: An IZeroClampSeries has no stimulus, so this attribute is automatically set to "N/A" + ifabsent: string(N/A) range: text + required: true + equals_string: N/A bias_current: name: bias_current description: Bias current, in amps, fixed to 0.0. @@ -205,7 +214,10 @@ classes: fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -286,7 +298,10 @@ classes: fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. 
+ ifabsent: string(amperes) range: text + required: true + equals_string: amperes value: name: value range: AnyType @@ -304,7 +319,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -322,7 +340,10 @@ classes: unit: name: unit description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -341,7 +362,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'. + ifabsent: string(hertz) range: text + required: true + equals_string: hertz value: name: value range: float32 @@ -360,7 +384,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_correction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -379,7 +406,10 @@ classes: name: unit description: Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'. + ifabsent: string(percent) range: text + required: true + equals_string: percent value: name: value range: float32 @@ -398,7 +428,10 @@ classes: name: unit description: Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'. + ifabsent: string(farads) range: text + required: true + equals_string: farads value: name: value range: float32 @@ -417,7 +450,10 @@ classes: name: unit description: Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'. + ifabsent: string(ohms) range: text + required: true + equals_string: ohms value: name: value range: float32 @@ -454,7 +490,10 @@ classes: fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + ifabsent: string(volts) range: text + required: true + equals_string: volts value: name: value range: AnyType @@ -583,7 +622,10 @@ classes: description: name: description description: Description of what is in this dynamic table. + ifabsent: string(Table for storing intracellular electrode related metadata.) range: text + required: true + equals_string: Table for storing intracellular electrode related metadata. electrode: name: electrode description: Column for storing the reference to the intracellular electrode. @@ -603,7 +645,10 @@ classes: description: name: description description: Description of what is in this dynamic table. + ifabsent: string(Table for storing intracellular stimulus related metadata.) range: text + required: true + equals_string: Table for storing intracellular stimulus related metadata. stimulus: name: stimulus annotations: @@ -645,7 +690,10 @@ classes: description: name: description description: Description of what is in this dynamic table. + ifabsent: string(Table for storing intracellular response related metadata.) range: text + required: true + equals_string: Table for storing intracellular response related metadata. response: name: response annotations: @@ -686,7 +734,14 @@ classes: name: description description: Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute. 
+ ifabsent: string(A table to group together a stimulus and response from a + single electrode and a single simultaneous recording and for storing metadata + about the intracellular recording.) range: text + required: true + equals_string: A table to group together a stimulus and response from a single + electrode and a single simultaneous recording and for storing metadata about + the intracellular recording. electrodes: name: electrodes description: Table for storing intracellular electrode related metadata. @@ -758,6 +813,7 @@ classes: table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: IntracellularRecordingsTable + required: true SequentialRecordingsTable: name: SequentialRecordingsTable description: A table for grouping different sequential recordings from the SimultaneousRecordingsTable @@ -820,6 +876,7 @@ classes: table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: SimultaneousRecordingsTable + required: true RepetitionsTable: name: RepetitionsTable description: A table for grouping different sequential intracellular recordings @@ -873,6 +930,7 @@ classes: region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: SequentialRecordingsTable + required: true ExperimentalConditionsTable: name: ExperimentalConditionsTable description: A table for grouping different intracellular recording repetitions @@ -922,3 +980,4 @@ classes: applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here. range: RepetitionsTable + required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml index b87f670..cac5d73 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml @@ -145,6 +145,8 @@ classes: (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. range: int32 + required: true + multivalued: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml index bedbbeb..9395fd9 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml @@ -72,7 +72,9 @@ classes: description: Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". + ifabsent: string(see 'feature_units') range: text + required: false value: name: value range: numeric @@ -200,7 +202,9 @@ classes: description: Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + ifabsent: string(no unit) range: text + required: true value: name: value array: @@ -458,3 +462,4 @@ classes: if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples. 
range: float64 + required: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml index 2e5036c..478cc92 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml @@ -29,28 +29,34 @@ classes: name: pmt_gain description: Photomultiplier gain. range: float32 + required: false scan_line_rate: name: scan_line_rate description: Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. range: float32 + required: false exposure_time: name: exposure_time description: Exposure time of the sample; often the inverse of the frequency. range: float32 + required: false binning: name: binning description: Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc. range: uint8 + required: false power: name: power description: Power of the excitation in mW, if known. range: float32 + required: false intensity: name: intensity description: Intensity of the excitation in mW/mm^2, if known. range: float32 + required: false imaging_plane: name: imaging_plane annotations: @@ -76,12 +82,14 @@ classes: name: pmt_gain description: Photomultiplier gain. range: float32 + required: false scan_line_rate: name: scan_line_rate description: Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. range: float32 + required: false field_of_view: name: field_of_view description: Width, height and depth of image, or imaged area, in meters. @@ -483,12 +491,16 @@ classes: x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000. + ifabsent: float(1.0) range: float32 + required: false unit: name: unit description: Base unit of measurement for working with the data. The default value is 'meters'. + ifabsent: string(meters) range: text + required: false value: name: value range: float32 @@ -521,7 +533,9 @@ classes: unit: name: unit description: Measurement units for origin_coords. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value range: float32 @@ -549,7 +563,9 @@ classes: unit: name: unit description: Measurement units for grid_spacing. The default value is 'meters'. + ifabsent: string(meters) range: text + required: true value: name: value range: float32 diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml index f3173d3..6416821 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml @@ -106,14 +106,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). 
range: text + required: true value: name: value array: @@ -137,14 +142,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -167,14 +177,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -198,14 +213,19 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true unit: name: unit description: Unit that axis data is stored in (e.g., degrees). range: text + required: true value: name: value array: @@ -229,23 +249,30 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value. range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true focal_depth: name: focal_depth description: Focal depth offset, in meters. range: float32 + required: true format: name: format description: Format of image. Right now only 'raw' is supported. range: text + required: true value: name: value array: @@ -269,10 +296,14 @@ classes: description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true value: name: value array: @@ -296,19 +327,25 @@ classes: description: Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value range: int32 + required: true dimension: name: dimension description: 'Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.' range: int32 + required: true + multivalued: true field_of_view: name: field_of_view description: Size of viewing area, in meters. range: float32 + required: true + multivalued: true format: name: format description: Format of image. Right now only 'raw' is supported. 
range: text + required: true value: name: value array: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.sparse.yaml index 6c1dbe1..8220620 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.sparse.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.sparse.yaml @@ -23,7 +23,13 @@ classes: shape: name: shape description: the shape of this sparse matrix + array: + dimensions: + - alias: 'null' + exact_cardinality: 2 range: int + required: true + multivalued: false indices: name: indices description: column indices diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.table.yaml index f7a2a0a..27a272c 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.table.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.table.yaml @@ -34,6 +34,7 @@ classes: name: target description: Target dataset that this index applies to. range: Data + required: true tree_root: true VectorData: name: VectorData @@ -55,6 +56,7 @@ classes: name: description description: Description of what these vectors represent. range: text + required: true tree_root: true VectorIndex: name: VectorIndex @@ -71,6 +73,7 @@ classes: name: target description: Reference to the target dataset that this index applies to. range: VectorData + required: true tree_root: true ElementIdentifiers: name: ElementIdentifiers @@ -106,10 +109,12 @@ classes: description: Reference to the DynamicTable object that this region applies to. range: DynamicTable + required: true description: name: description description: Description of what this table region points to. range: text + required: true tree_root: true Container: name: Container @@ -155,10 +160,13 @@ classes: description: The names of the columns in this table. This should be used to specify an order to the columns. range: text + required: true + multivalued: true description: name: description description: Description of what is in this dynamic table. range: text + required: true id: name: id description: Array of unique identifiers for the rows of this dynamic table. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.sparse.yaml index 5c825a8..42a1170 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.sparse.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.sparse.yaml @@ -23,7 +23,13 @@ classes: shape: name: shape description: the shape of this sparse matrix + array: + dimensions: + - alias: 'null' + exact_cardinality: 2 range: int + required: true + multivalued: false indices: name: indices description: column indices diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.table.yaml index dec9fc7..fe82d7d 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.table.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.table.yaml @@ -34,6 +34,7 @@ classes: name: target description: Target dataset that this index applies to. 
range: Data + required: true tree_root: true VectorData: name: VectorData @@ -55,6 +56,7 @@ classes: name: description description: Description of what these vectors represent. range: text + required: true tree_root: true VectorIndex: name: VectorIndex @@ -71,6 +73,7 @@ classes: name: target description: Reference to the target dataset that this index applies to. range: VectorData + required: true tree_root: true ElementIdentifiers: name: ElementIdentifiers @@ -106,10 +109,12 @@ classes: description: Reference to the DynamicTable object that this region applies to. range: DynamicTable + required: true description: name: description description: Description of what this table region points to. range: text + required: true tree_root: true Container: name: Container @@ -155,10 +160,13 @@ classes: description: The names of the columns in this table. This should be used to specify an order to the columns. range: text + required: true + multivalued: true description: name: description description: Description of what is in this dynamic table. range: text + required: true id: name: id description: Array of unique identifiers for the rows of this dynamic table. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.sparse.yaml index a7b2bb8..bdb5f39 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.sparse.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.sparse.yaml @@ -23,7 +23,13 @@ classes: shape: name: shape description: the shape of this sparse matrix + array: + dimensions: + - alias: 'null' + exact_cardinality: 2 range: int + required: true + multivalued: false indices: name: indices description: column indices diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.table.yaml index ec35619..4285b03 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.table.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.table.yaml @@ -34,6 +34,7 @@ classes: name: target description: Target dataset that this index applies to. range: Data + required: true tree_root: true VectorData: name: VectorData @@ -55,6 +56,7 @@ classes: name: description description: Description of what these vectors represent. range: text + required: true value: name: value range: AnyType @@ -93,6 +95,7 @@ classes: name: target description: Reference to the target dataset that this index applies to. range: VectorData + required: true value: name: value array: @@ -134,10 +137,12 @@ classes: description: Reference to the DynamicTable object that this region applies to. range: DynamicTable + required: true description: name: description description: Description of what this table region points to. range: text + required: true tree_root: true Container: name: Container @@ -183,10 +188,13 @@ classes: description: The names of the columns in this table. This should be used to specify an order to the columns. range: text + required: true + multivalued: true description: name: description description: Description of what is in this dynamic table. range: text + required: true id: name: id description: Array of unique identifiers for the rows of this dynamic table. 
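Across these schema hunks the same three-key pattern recurs: `required` controls whether the generated field is optional, `ifabsent: string(...)` supplies its default, and `equals_string` pins it to a single fixed value. As a rough sketch of how these keys could surface in the generated pydantic models (class and field names are taken from the hunks above; the actual generator output may differ):

```python
from typing import Literal

from pydantic import BaseModel, Field


class VectorData(BaseModel):
    # `required: true` with no `ifabsent` default -> a mandatory field
    description: str = Field(..., description="Description of what these vectors represent.")


class IZeroClampSeries(BaseModel):
    # `ifabsent: string(N/A)` -> default value; `equals_string: N/A` -> Literal type
    stimulus_description: Literal["N/A"] = "N/A"
```

A slot marked `required: false` would instead render as something like `Optional[str] = None`, and `multivalued: true` as a `List[...]`-typed field.

diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.sparse.yaml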
index c5ec31d..24ea8fd 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.sparse.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.sparse.yaml @@ -29,6 +29,8 @@ classes: description: The shape (number of rows, number of columns) of this sparse matrix. range: uint + required: true + multivalued: true indices: name: indices description: The column indices. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml index aaef099..9ed7bc1 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.table.yaml @@ -33,6 +33,7 @@ classes: name: description description: Description of what these vectors represent. range: text + required: true value: name: value range: AnyType @@ -73,6 +74,7 @@ classes: name: target description: Reference to the target dataset that this index applies to. range: VectorData + required: true tree_root: true ElementIdentifiers: name: ElementIdentifiers @@ -108,10 +110,12 @@ classes: description: Reference to the DynamicTable object that this region applies to. range: DynamicTable + required: true description: name: description description: Description of what this table region points to. range: text + required: true tree_root: true DynamicTable: name: DynamicTable @@ -144,10 +148,13 @@ classes: description: The names of the columns in this table. This should be used to specify an order to the columns. range: text + required: true + multivalued: true description: name: description description: Description of what is in this dynamic table. range: text + required: true id: name: id description: Array of unique identifiers for the rows of this dynamic table. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.sparse.yaml index 74e9c04..842d1d6 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.sparse.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.sparse.yaml @@ -29,6 +29,8 @@ classes: description: The shape (number of rows, number of columns) of this sparse matrix. range: uint + required: true + multivalued: true indices: name: indices description: The column indices. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml index d3398c4..938ab2d 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml @@ -33,6 +33,7 @@ classes: name: description description: Description of what these vectors represent. range: text + required: true value: name: value range: AnyType @@ -73,6 +74,7 @@ classes: name: target description: Reference to the target dataset that this index applies to. range: VectorData + required: true tree_root: true ElementIdentifiers: name: ElementIdentifiers @@ -108,10 +110,12 @@ classes: description: Reference to the DynamicTable object that this region applies to.
range: DynamicTable + required: true description: name: description description: Description of what this table region points to. range: text + required: true tree_root: true DynamicTable: name: DynamicTable @@ -144,10 +148,13 @@ classes: description: The names of the columns in this table. This should be used to specify an order to the columns. range: text + required: true + multivalued: true description: name: description description: Description of what is in this dynamic table. range: text + required: true id: name: id description: Array of unique identifiers for the rows of this dynamic table. diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml index c14e264..0a31806 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml @@ -28,4 +28,5 @@ classes: description: Reference to the VectorData object that contains the enumerable elements range: VectorData + required: true tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.experimental.yaml index 7bc2244..c6cf1d4 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.experimental.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.experimental.yaml @@ -28,4 +28,5 @@ classes: description: Reference to the VectorData object that contains the enumerable elements range: VectorData + required: true tree_root: true From c06859a5375a0dc24609c96f9e688e0e83381abb Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 5 Aug 2024 20:51:52 -0700 Subject: [PATCH 18/61] first impl of dynamictable working! --- nwb_linkml/pdm.lock | 434 +++++++++--------- nwb_linkml/pyproject.toml | 2 +- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 20 +- .../hdmf_common/v1_8_0/hdmf_common_table.py | 30 +- nwb_linkml/tests/test_includes/test_hdmf.py | 85 +++- 5 files changed, 336 insertions(+), 235 deletions(-) diff --git a/nwb_linkml/pdm.lock b/nwb_linkml/pdm.lock index 5ed71bf..d81da1e 100644 --- a/nwb_linkml/pdm.lock +++ b/nwb_linkml/pdm.lock @@ -2,17 +2,23 @@ # It is not intended for manual editing. 
[metadata] -groups = ["default", "dev", "tests"] -strategy = ["cross_platform", "inherit_metadata"] -lock_version = "4.4.2" -content_hash = "sha256:d5877e87ce194e1cd8d62d8e935e39941e0c8f78667e0240ff64690f848d4a6c" +groups = ["default", "dev", "plot", "tests"] +strategy = ["inherit_metadata"] +lock_version = "4.5.0" +content_hash = "sha256:6b84e75b424a643bdfbc4cb0d775e6db831b27c7a770097e28dc8eb8bf76b54d" + +[[metadata.targets]] +requires_python = ">=3.10,<3.13" [[package]] name = "annotated-types" version = "0.7.0" requires_python = ">=3.8" summary = "Reusable constraint types to use with typing.Annotated" -groups = ["default", "dev", "tests"] +groups = ["default"] +dependencies = [ + "typing-extensions>=4.0.0; python_version < \"3.9\"", +] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -22,7 +28,10 @@ files = [ name = "antlr4-python3-runtime" version = "4.9.3" summary = "ANTLR 4.9.3 runtime for Python 3.7" -groups = ["default", "dev", "tests"] +groups = ["default"] +dependencies = [ + "typing; python_version < \"3.5\"", +] files = [ {file = "antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b"}, ] @@ -32,7 +41,7 @@ name = "arrow" version = "1.3.0" requires_python = ">=3.8" summary = "Better dates & times for Python" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "python-dateutil>=2.7.0", "types-python-dateutil>=2.8.10", @@ -44,21 +53,24 @@ files = [ [[package]] name = "attrs" -version = "23.2.0" +version = "24.1.0" requires_python = ">=3.7" summary = "Classes Without Boilerplate" groups = ["default", "dev", "tests"] +dependencies = [ + "importlib-metadata; python_version < \"3.8\"", +] files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.1.0-py3-none-any.whl", hash = "sha256:377b47448cb61fea38533f671fba0d0f8a96fd58facd4dc518e3dac9dbea0905"}, + {file = "attrs-24.1.0.tar.gz", hash = "sha256:adbdec84af72d38be7628e353a09b6a6790d15cd71819f6e9d7b0faa8a125745"}, ] [[package]] name = "black" -version = "24.4.2" +version = "24.8.0" requires_python = ">=3.8" summary = "The uncompromising code formatter." 
-groups = ["default", "dev", "tests"] +groups = ["default", "dev"] dependencies = [ "click>=8.0.0", "mypy-extensions>=0.4.3", @@ -69,20 +81,20 @@ dependencies = [ "typing-extensions>=4.0.1; python_version < \"3.11\"", ] files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, ] [[package]] @@ -90,7 +102,7 @@ name = "blinker" version = "1.8.2" requires_python = ">=3.8" summary = "Fast, simple object-to-object and broadcast signaling" -groups = ["dev", "tests"] +groups = ["dev", "plot", "tests"] files = [ {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, @@ -117,7 +129,7 @@ name = "certifi" version = "2024.7.4" requires_python = ">=3.6" summary = "Python package for providing Mozilla's CA Bundle." -groups = ["default", "dev", "tests"] +groups = ["default", "dev", "plot", "tests"] files = [ {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, @@ -127,7 +139,7 @@ files = [ name = "cfgraph" version = "0.2.1" summary = "rdflib collections flattening graph" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "rdflib>=0.4.2", ] @@ -140,7 +152,7 @@ name = "chardet" version = "5.2.0" requires_python = ">=3.7" summary = "Universal encoding detector for Python 3" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, @@ -151,7 +163,7 @@ name = "charset-normalizer" version = "3.3.2" requires_python = ">=3.7.0" summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-groups = ["default", "dev", "tests"] +groups = ["default", "dev", "plot", "tests"] files = [ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, @@ -207,9 +219,10 @@ name = "click" version = "8.1.7" requires_python = ">=3.7" summary = "Composable command line interface toolkit" -groups = ["default", "dev", "tests"] +groups = ["default", "dev", "plot", "tests"] dependencies = [ "colorama; platform_system == \"Windows\"", + "importlib-metadata; python_version < \"3.8\"", ] files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, @@ -221,7 +234,7 @@ name = "cloudpickle" version = "3.0.0" requires_python = ">=3.8" summary = "Pickler class to extend the standard pickle.Pickler functionality" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"}, {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"}, @@ -232,7 +245,7 @@ name = "colorama" version = "0.4.6" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" summary = "Cross-platform colored terminal text." -groups = ["default", "dev", "tests"] +groups = ["default", "dev", "plot", "tests"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -264,7 +277,6 @@ files = [ {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, ] @@ -299,7 +311,6 @@ files = [ {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, ] @@ -324,7 +335,7 @@ name = "curies" version = "0.7.10" requires_python = ">=3.8" summary = "Idiomatic conversion between URIs and compact URIs (CURIEs)." 
-groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "pydantic", "pytrie", @@ -340,7 +351,7 @@ name = "dash" version = "2.17.1" requires_python = ">=3.8" summary = "A Python framework for building reactive web-apps. Developed by Plotly." -groups = ["dev", "tests"] +groups = ["dev", "plot", "tests"] dependencies = [ "Flask<3.1,>=1.0.4", "Werkzeug<3.1", @@ -364,7 +375,7 @@ files = [ name = "dash-core-components" version = "2.0.0" summary = "Core component suite for Dash" -groups = ["dev", "tests"] +groups = ["dev", "plot", "tests"] files = [ {file = "dash_core_components-2.0.0-py3-none-any.whl", hash = "sha256:52b8e8cce13b18d0802ee3acbc5e888cb1248a04968f962d63d070400af2e346"}, {file = "dash_core_components-2.0.0.tar.gz", hash = "sha256:c6733874af975e552f95a1398a16c2ee7df14ce43fa60bb3718a3c6e0b63ffee"}, @@ -374,7 +385,7 @@ files = [ name = "dash-cytoscape" version = "0.3.0" summary = "A Component Library for Dash aimed at facilitating network visualization in Python, wrapped around Cytoscape.js" -groups = ["dev", "tests"] +groups = ["dev", "plot", "tests"] dependencies = [ "dash", ] @@ -387,7 +398,7 @@ files = [ name = "dash-html-components" version = "2.0.0" summary = "Vanilla HTML components for Dash" -groups = ["dev", "tests"] +groups = ["dev", "plot", "tests"] files = [ {file = "dash_html_components-2.0.0-py3-none-any.whl", hash = "sha256:b42cc903713c9706af03b3f2548bda4be7307a7cf89b7d6eae3da872717d1b63"}, {file = "dash_html_components-2.0.0.tar.gz", hash = "sha256:8703a601080f02619a6390998e0b3da4a5daabe97a1fd7a9cebc09d015f26e50"}, @@ -397,7 +408,7 @@ files = [ name = "dash-table" version = "5.0.0" summary = "Dash table" -groups = ["dev", "tests"] +groups = ["dev", "plot", "tests"] files = [ {file = "dash_table-5.0.0-py3-none-any.whl", hash = "sha256:19036fa352bb1c11baf38068ec62d172f0515f73ca3276c79dee49b95ddc16c9"}, {file = "dash_table-5.0.0.tar.gz", hash = "sha256:18624d693d4c8ef2ddec99a6f167593437a7ea0bf153aa20f318c170c5bc7308"}, @@ -408,7 +419,7 @@ name = "dask" version = "2024.7.1" requires_python = ">=3.9" summary = "Parallel PyData with Task Scheduling" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "click>=8.1", "cloudpickle>=1.5.0", @@ -429,7 +440,7 @@ name = "deprecated" version = "1.2.14" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" summary = "Python @deprecated decorator to deprecate old python classes, functions or methods." -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "wrapt<2,>=1.10", ] @@ -452,7 +463,7 @@ name = "et-xmlfile" version = "1.1.0" requires_python = ">=3.6" summary = "An implementation of lxml.xmlfile for the standard library" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, @@ -475,12 +486,13 @@ name = "flask" version = "3.0.3" requires_python = ">=3.8" summary = "A simple framework for building complex web applications." 
-groups = ["dev", "tests"] +groups = ["dev", "plot", "tests"] dependencies = [ "Jinja2>=3.1.2", "Werkzeug>=3.0.0", "blinker>=1.6.2", "click>=8.1.3", + "importlib-metadata>=3.6.0; python_version < \"3.10\"", "itsdangerous>=2.1.2", ] files = [ @@ -493,7 +505,10 @@ name = "fqdn" version = "1.5.1" requires_python = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" summary = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" -groups = ["default", "dev", "tests"] +groups = ["default"] +dependencies = [ + "cached-property>=1.3.0; python_version < \"3.8\"", +] files = [ {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, @@ -504,7 +519,7 @@ name = "fsspec" version = "2024.6.1" requires_python = ">=3.8" summary = "File-system specification" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, @@ -516,6 +531,9 @@ version = "1.2.0" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" summary = "A backport of fstrings to python<3.6" groups = ["dev", "tests"] +dependencies = [ + "tokenize-rt>=3; python_version < \"3.6\"", +] files = [ {file = "future_fstrings-1.2.0-py2.py3-none-any.whl", hash = "sha256:90e49598b553d8746c4dc7d9442e0359d038c3039d802c91c0a55505da318c63"}, {file = "future_fstrings-1.2.0.tar.gz", hash = "sha256:6cf41cbe97c398ab5a81168ce0dbb8ad95862d3caf23c21e4430627b90844089"}, @@ -537,7 +555,7 @@ name = "graphviz" version = "0.20.3" requires_python = ">=3.8" summary = "Simple Python interface for Graphviz" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "graphviz-0.20.3-py3-none-any.whl", hash = "sha256:81f848f2904515d8cd359cc611faba817598d2feaac4027b266aa3eda7b3dde5"}, {file = "graphviz-0.20.3.zip", hash = "sha256:09d6bc81e6a9fa392e7ba52135a9d49f1ed62526f96499325930e87ca1b5925d"}, @@ -548,7 +566,7 @@ name = "greenlet" version = "3.0.3" requires_python = ">=3.7" summary = "Lightweight in-process concurrent programming" -groups = ["default", "dev", "tests"] +groups = ["default"] marker = "(platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"" files = [ {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, @@ -586,7 +604,7 @@ name = "h5py" version = "3.11.0" requires_python = ">=3.8" summary = "Read and write HDF5 files from Python" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "numpy>=1.17.3", ] @@ -611,7 +629,7 @@ name = "hbreader" version = "0.9.1" requires_python = ">=3.7" summary = "Honey Badger reader - a generic file/url/string open and read tool" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "hbreader-0.9.1-py3-none-any.whl", hash = "sha256:9a6e76c9d1afc1b977374a5dc430a1ebb0ea0488205546d4678d6e31cc5f6801"}, {file = "hbreader-0.9.1.tar.gz", hash = 
"sha256:d2c132f8ba6276d794c66224c3297cec25c8079d0a4cf019c061611e0a3b94fa"}, @@ -622,7 +640,7 @@ name = "idna" version = "3.7" requires_python = ">=3.5" summary = "Internationalized Domain Names in Applications (IDNA)" -groups = ["default", "dev", "tests"] +groups = ["default", "dev", "plot", "tests"] files = [ {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, @@ -633,8 +651,9 @@ name = "importlib-metadata" version = "8.2.0" requires_python = ">=3.8" summary = "Read metadata from Python packages" -groups = ["default", "dev", "tests"] +groups = ["default", "dev", "plot", "tests"] dependencies = [ + "typing-extensions>=3.6.4; python_version < \"3.8\"", "zipp>=0.5", ] files = [ @@ -657,7 +676,7 @@ files = [ name = "isodate" version = "0.6.1" summary = "An ISO 8601 date/time/duration parser and formatter" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "six", ] @@ -671,7 +690,7 @@ name = "isoduration" version = "20.11.0" requires_python = ">=3.7" summary = "Operations with ISO 8601 durations" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "arrow>=0.15.0", ] @@ -685,7 +704,7 @@ name = "itsdangerous" version = "2.2.0" requires_python = ">=3.8" summary = "Safely pass data to untrusted environments and back." -groups = ["dev", "tests"] +groups = ["dev", "plot", "tests"] files = [ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, @@ -696,7 +715,7 @@ name = "jinja2" version = "3.1.4" requires_python = ">=3.7" summary = "A very fast and expressive template engine." -groups = ["default", "dev", "tests"] +groups = ["default", "dev", "plot", "tests"] dependencies = [ "MarkupSafe>=2.0", ] @@ -710,7 +729,7 @@ name = "json-flattener" version = "0.1.9" requires_python = ">=3.7.0" summary = "Python library for denormalizing nested dicts or json objects to tables and back" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "click", "pyyaml", @@ -724,7 +743,7 @@ files = [ name = "jsonasobj" version = "1.3.1" summary = "JSON as python objects" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "jsonasobj-1.3.1-py3-none-any.whl", hash = "sha256:b9e329dc1ceaae7cf5d5b214684a0b100e0dad0be6d5bbabac281ec35ddeca65"}, {file = "jsonasobj-1.3.1.tar.gz", hash = "sha256:d52e0544a54a08f6ea3f77fa3387271e3648655e0eace2f21e825c26370e44a2"}, @@ -735,7 +754,7 @@ name = "jsonasobj2" version = "1.0.4" requires_python = ">=3.6" summary = "JSON as python objects - version 2" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "hbreader", ] @@ -749,7 +768,7 @@ name = "jsonpatch" version = "1.33" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" summary = "Apply JSON-Patches (RFC 6902) " -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "jsonpointer>=1.9", ] @@ -762,7 +781,7 @@ files = [ name = "jsonpath-ng" version = "1.6.1" summary = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming." 
-groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "ply", ] @@ -776,7 +795,7 @@ name = "jsonpointer" version = "3.0.0" requires_python = ">=3.7" summary = "Identify specific nodes in a JSON document (RFC 6901) " -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, @@ -787,10 +806,12 @@ name = "jsonschema" version = "4.23.0" requires_python = ">=3.8" summary = "An implementation of JSON Schema validation for Python" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "attrs>=22.2.0", + "importlib-resources>=1.4.0; python_version < \"3.9\"", "jsonschema-specifications>=2023.03.6", + "pkgutil-resolve-name>=1.3.10; python_version < \"3.9\"", "referencing>=0.28.4", "rpds-py>=0.7.1", ] @@ -804,8 +825,9 @@ name = "jsonschema-specifications" version = "2023.12.1" requires_python = ">=3.8" summary = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ + "importlib-resources>=1.4.0; python_version < \"3.9\"", "referencing>=0.31.0", ] files = [ @@ -819,7 +841,7 @@ version = "4.23.0" extras = ["format"] requires_python = ">=3.8" summary = "An implementation of JSON Schema validation for Python" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "fqdn", "idna", @@ -844,7 +866,7 @@ git = "https://github.com/sneakers-the-rat/linkml" ref = "nwb-linkml" revision = "0a6578bff4713688260f64b3076b197bd6decce9" summary = "Linked Open Data Modeling Language" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "antlr4-python3-runtime<4.10,==4.*,>=4.9.0", "click>=7.0", @@ -869,6 +891,7 @@ dependencies = [ "rdflib>=6.0.0", "requests>=2.22", "sqlalchemy>=1.4.31", + "typing-extensions>=4.4.0; python_version < \"3.9\"", "watchdog>=0.9.0", ] @@ -877,7 +900,7 @@ name = "linkml-dataops" version = "0.1.0" requires_python = ">=3.7" summary = "LinkML Data Operations API" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "jinja2", "jsonpatch", @@ -895,7 +918,7 @@ name = "linkml-runtime" version = "1.8.0" requires_python = "<4.0,>=3.8" summary = "Runtime environment for LinkML, the Linked open data modeling language" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "click", "curies>=0.5.4", @@ -921,7 +944,7 @@ name = "locket" version = "1.0.0" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" summary = "File-based locks for Python on Linux and Windows" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "locket-1.0.0-py2.py3-none-any.whl", hash = "sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3"}, {file = "locket-1.0.0.tar.gz", hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632"}, @@ -932,7 +955,7 @@ name = "markdown-it-py" version = "3.0.0" requires_python = ">=3.8" summary = "Python port of markdown-it. Markdown parsing, done right!" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "mdurl~=0.1", ] @@ -946,7 +969,7 @@ name = "markupsafe" version = "2.1.5" requires_python = ">=3.7" summary = "Safely add untrusted strings to HTML/XML markup." 
-groups = ["default", "dev", "tests"] +groups = ["default", "dev", "plot", "tests"] files = [ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, @@ -986,7 +1009,7 @@ name = "mdurl" version = "0.1.2" requires_python = ">=3.7" summary = "Markdown URL utilities" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -997,7 +1020,7 @@ name = "mypy-extensions" version = "1.0.0" requires_python = ">=3.5" summary = "Type system extensions for programs checked with the mypy type checker." -groups = ["default", "dev", "tests"] +groups = ["default", "dev"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -1008,7 +1031,7 @@ name = "nest-asyncio" version = "1.6.0" requires_python = ">=3.5" summary = "Patch asyncio to allow nested event loops" -groups = ["dev", "tests"] +groups = ["dev", "plot", "tests"] files = [ {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, @@ -1030,9 +1053,11 @@ name = "nptyping" version = "2.5.0" requires_python = ">=3.7" summary = "Type hints for NumPy." 
-groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "numpy<2.0.0,>=1.20.0; python_version >= \"3.8\"", + "numpy==1.21.5; python_version < \"3.8\"", + "typing-extensions<5.0.0,>=4.0.0; python_version < \"3.10\"", ] files = [ {file = "nptyping-2.5.0-py3-none-any.whl", hash = "sha256:764e51836faae33a7ae2e928af574cfb701355647accadcc89f2ad793630b7c8"}, @@ -1044,7 +1069,7 @@ name = "numpy" version = "1.26.4" requires_python = ">=3.9" summary = "Fundamental package for array computing in Python" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -1070,26 +1095,23 @@ files = [ {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] name = "numpydantic" -version = "1.2.2" +version = "1.3.0" requires_python = "<4.0,>=3.9" summary = "Type and shape validation and serialization for numpy arrays in pydantic models" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ - "nptyping>=2.5.0", "numpy>=1.24.0", "pydantic>=2.3.0", + "typing-extensions>=4.11.0; python_version < \"3.11\"", ] files = [ - {file = "numpydantic-1.2.2-py3-none-any.whl", hash = "sha256:05481d7dbb202b6e31a97d77fa9d650b57766f996981076844572c7ddeff1a0b"}, - {file = "numpydantic-1.2.2.tar.gz", hash = "sha256:4e81c1d162b7a8bbec52c69011e745488bd452b017375d7df8f1de8ce96705a1"}, + {file = "numpydantic-1.3.0-py3-none-any.whl", hash = "sha256:bda3aa2cd858e9211006be8b8e589e1905b2c6a2db17cec0c28563ba1ad66b68"}, + {file = "numpydantic-1.3.0.tar.gz", hash = "sha256:b3931d51ba7e22d48bdd2ae56cad368f63db99ef74e8570021a7fd176b2ffc1f"}, ] [[package]] @@ -1097,7 +1119,7 @@ name = "nwb-schema-language" version = "0.1.3" requires_python = ">=3.9,<4.0" summary = "Translation of the nwb-schema-language to LinkML" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "linkml-runtime<2.0.0,>=1.1.24", "pydantic<3.0.0,>=2.3.0", @@ -1112,7 +1134,7 @@ name = "openpyxl" version = "3.1.5" requires_python = ">=3.8" summary = "A Python library to read/write Excel 2010 xlsx/xlsm files" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "et-xmlfile", ] @@ -1126,7 +1148,7 @@ name = "packaging" version = "24.1" requires_python = ">=3.8" summary = "Core utilities for Python packages" -groups = ["default", "dev", "tests"] +groups = ["default", "dev", 
"plot", "tests"] files = [ {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, @@ -1137,7 +1159,7 @@ name = "pandas" version = "2.2.2" requires_python = ">=3.9" summary = "Powerful data structures for data analysis, time series, and statistics" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "numpy>=1.22.4; python_version < \"3.11\"", "numpy>=1.23.2; python_version == \"3.11\"", @@ -1175,7 +1197,7 @@ files = [ name = "parse" version = "1.20.2" summary = "parse() is the opposite of format()" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "parse-1.20.2-py2.py3-none-any.whl", hash = "sha256:967095588cb802add9177d0c0b6133b5ba33b1ea9007ca800e526f42a85af558"}, {file = "parse-1.20.2.tar.gz", hash = "sha256:b41d604d16503c79d81af5165155c0b20f6c8d6c559efa66b4b695c3e5a0a0ce"}, @@ -1186,7 +1208,7 @@ name = "partd" version = "1.4.2" requires_python = ">=3.9" summary = "Appendable key-value storage" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "locket", "toolz", @@ -1201,7 +1223,7 @@ name = "pathspec" version = "0.12.1" requires_python = ">=3.8" summary = "Utility library for gitignore style pattern matching of file paths." -groups = ["default", "dev", "tests"] +groups = ["default", "dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -1223,7 +1245,7 @@ name = "plotly" version = "5.23.0" requires_python = ">=3.8" summary = "An open-source, interactive data visualization library for Python" -groups = ["dev", "tests"] +groups = ["dev", "plot", "tests"] dependencies = [ "packaging", "tenacity>=6.2.0", @@ -1248,7 +1270,7 @@ files = [ name = "ply" version = "3.11" summary = "Python Lex & Yacc" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, @@ -1259,7 +1281,7 @@ name = "prefixcommons" version = "0.1.12" requires_python = ">=3.7,<4.0" summary = "A python API for working with ID prefixes" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "PyYAML<7.0,>=6.0", "click<9.0.0,>=8.1.3", @@ -1276,7 +1298,7 @@ name = "prefixmaps" version = "0.2.5" requires_python = "<4.0,>=3.8" summary = "A python library for retrieving semantic prefix maps" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "curies>=0.5.3", "pyyaml>=5.3.1", @@ -1291,10 +1313,11 @@ name = "pydantic" version = "2.8.2" requires_python = ">=3.8" summary = "Data validation using Python type hints" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "annotated-types>=0.4.0", "pydantic-core==2.20.1", + "typing-extensions>=4.12.2; python_version >= \"3.13\"", "typing-extensions>=4.6.1; python_version < \"3.13\"", ] files = [ @@ -1307,7 +1330,7 @@ name = "pydantic-core" version = "2.20.1" requires_python = ">=3.8" summary = "Core functionality for Pydantic validation and serialization" -groups = ["default", "dev", "tests"] +groups = ["default"] 
dependencies = [ "typing-extensions!=4.7.0,>=4.6.0", ] @@ -1356,14 +1379,6 @@ files = [ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, ] @@ -1372,7 +1387,7 @@ name = "pydantic-settings" version = "2.4.0" requires_python = ">=3.8" summary = "Settings management using Pydantic" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "pydantic>=2.7.0", "python-dotenv>=0.21.0", @@ -1387,7 +1402,7 @@ name = "pygments" version = "2.18.0" requires_python = ">=3.8" summary = "Pygments is a syntax highlighting package written in Python." 
-groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, @@ -1397,7 +1412,7 @@ files = [ name = "pyjsg" version = "0.11.10" summary = "Python JSON Schema Grammar interpreter" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "antlr4-python3-runtime~=4.9.3", "jsonasobj>=1.2.1", @@ -1412,7 +1427,7 @@ name = "pyparsing" version = "3.1.2" requires_python = ">=3.6.8" summary = "pyparsing module - Classes and methods to define and execute parsing grammars" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, @@ -1423,7 +1438,7 @@ name = "pyshex" version = "0.8.1" requires_python = ">=3.6" summary = "Python ShEx Implementation" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "cfgraph>=0.2.1", "chardet", @@ -1445,7 +1460,7 @@ name = "pyshexc" version = "0.9.1" requires_python = ">=3.7" summary = "PyShExC - Python ShEx compiler" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "antlr4-python3-runtime~=4.9.3", "chardet", @@ -1468,6 +1483,7 @@ groups = ["default", "dev", "tests"] dependencies = [ "colorama; sys_platform == \"win32\"", "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", + "importlib-metadata>=0.12; python_version < \"3.8\"", "iniconfig", "packaging", "pluggy<2.0,>=0.12", @@ -1513,7 +1529,7 @@ files = [ name = "pytest-logging" version = "2015.11.4" summary = "Configures logging and allows tweaking the log level with a py.test flag" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "pytest>=2.8.1", ] @@ -1555,7 +1571,7 @@ name = "python-dateutil" version = "2.9.0.post0" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" summary = "Extensions to the standard Python datetime module" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "six>=1.5", ] @@ -1569,7 +1585,7 @@ name = "python-dotenv" version = "1.0.1" requires_python = ">=3.8" summary = "Read key-value pairs from a .env file and set them as environment variables" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, @@ -1579,7 +1595,7 @@ files = [ name = "pytrie" version = "0.4.0" summary = "A pure Python implementation of the trie data structure." 
-groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "sortedcontainers", ] @@ -1592,7 +1608,7 @@ files = [ name = "pytz" version = "2024.1" summary = "World timezone definitions, modern and historical" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, @@ -1603,7 +1619,7 @@ name = "pyyaml" version = "6.0.1" requires_python = ">=3.6" summary = "YAML parser and emitter for Python" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, @@ -1636,7 +1652,7 @@ name = "rdflib" version = "7.0.0" requires_python = ">=3.8.1,<4.0.0" summary = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information." -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "isodate<0.7.0,>=0.6.0", "pyparsing<4,>=2.1.0", @@ -1650,7 +1666,7 @@ files = [ name = "rdflib-jsonld" version = "0.6.1" summary = "rdflib extension adding JSON-LD parser and serializer" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "rdflib>=5.0.0", ] @@ -1664,7 +1680,7 @@ name = "rdflib-shim" version = "1.0.3" requires_python = ">=3.7" summary = "Shim for rdflib 5 and 6 incompatibilities" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "rdflib-jsonld==0.6.1", "rdflib>=5.0.0", @@ -1679,7 +1695,7 @@ name = "referencing" version = "0.35.1" requires_python = ">=3.8" summary = "JSON Referencing + Python" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "attrs>=22.2.0", "rpds-py>=0.7.0", @@ -1694,7 +1710,7 @@ name = "requests" version = "2.32.3" requires_python = ">=3.8" summary = "Python HTTP for Humans." 
-groups = ["default", "dev", "tests"] +groups = ["default", "dev", "plot", "tests"] dependencies = [ "certifi>=2017.4.17", "charset-normalizer<4,>=2", @@ -1729,7 +1745,7 @@ files = [ name = "retrying" version = "1.3.4" summary = "Retrying" -groups = ["dev", "tests"] +groups = ["dev", "plot", "tests"] dependencies = [ "six>=1.7.0", ] @@ -1743,7 +1759,7 @@ name = "rfc3339-validator" version = "0.1.4" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" summary = "A pure python RFC3339 validator" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "six", ] @@ -1756,7 +1772,7 @@ files = [ name = "rfc3987" version = "1.3.8" summary = "Parsing and validation of URIs (RFC 3986) and IRIs (RFC 3987)" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "rfc3987-1.3.8-py2.py3-none-any.whl", hash = "sha256:10702b1e51e5658843460b189b185c0366d2cf4cff716f13111b0ea9fd2dce53"}, {file = "rfc3987-1.3.8.tar.gz", hash = "sha256:d3c4d257a560d544e9826b38bc81db676890c79ab9d7ac92b39c7a253d5ca733"}, @@ -1767,10 +1783,11 @@ name = "rich" version = "13.7.1" requires_python = ">=3.7.0" summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "markdown-it-py>=2.2.0", "pygments<3.0.0,>=2.13.0", + "typing-extensions<5.0,>=4.0.0; python_version < \"3.9\"", ] files = [ {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, @@ -1782,7 +1799,7 @@ name = "rpds-py" version = "0.19.1" requires_python = ">=3.8" summary = "Python bindings to Rust's persistent data structures (rpds)" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "rpds_py-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:aaf71f95b21f9dc708123335df22e5a2fef6307e3e6f9ed773b2e0938cc4d491"}, {file = "rpds_py-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca0dda0c5715efe2ab35bb83f813f681ebcd2840d8b1b92bfc6fe3ab382fae4a"}, @@ -1835,18 +1852,6 @@ files = [ {file = "rpds_py-0.19.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:5e58b61dcbb483a442c6239c3836696b79f2cd8e7eec11e12155d3f6f2d886d1"}, {file = "rpds_py-0.19.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39d67896f7235b2c886fb1ee77b1491b77049dcef6fbf0f401e7b4cbed86bbd4"}, {file = "rpds_py-0.19.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8b32cd4ab6db50c875001ba4f5a6b30c0f42151aa1fbf9c2e7e3674893fb1dc4"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1c32e41de995f39b6b315d66c27dea3ef7f7c937c06caab4c6a79a5e09e2c415"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a129c02b42d46758c87faeea21a9f574e1c858b9f358b6dd0bbd71d17713175"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:346557f5b1d8fd9966059b7a748fd79ac59f5752cd0e9498d6a40e3ac1c1875f"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31e450840f2f27699d014cfc8865cc747184286b26d945bcea6042bb6aa4d26e"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01227f8b3e6c8961490d869aa65c99653df80d2f0a7fde8c64ebddab2b9b02fd"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:69084fd29bfeff14816666c93a466e85414fe6b7d236cfc108a9c11afa6f7301"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d2b88efe65544a7d5121b0c3b003ebba92bfede2ea3577ce548b69c5235185"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ea961a674172ed2235d990d7edf85d15d8dfa23ab8575e48306371c070cda67"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:5beffdbe766cfe4fb04f30644d822a1080b5359df7db3a63d30fa928375b2720"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:720f3108fb1bfa32e51db58b832898372eb5891e8472a8093008010911e324c5"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c2087dbb76a87ec2c619253e021e4fb20d1a72580feeaa6892b0b3d955175a71"}, - {file = "rpds_py-0.19.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ddd50f18ebc05ec29a0d9271e9dbe93997536da3546677f8ca00b76d477680c"}, {file = "rpds_py-0.19.1.tar.gz", hash = "sha256:31dd5794837f00b46f4096aa8ccaa5972f73a938982e32ed817bb520c465e520"}, ] @@ -1855,7 +1860,7 @@ name = "ruamel-yaml" version = "0.18.6" requires_python = ">=3.7" summary = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "ruamel-yaml-clib>=0.2.7; platform_python_implementation == \"CPython\" and python_version < \"3.13\"", ] @@ -1869,7 +1874,7 @@ name = "ruamel-yaml-clib" version = "0.2.8" requires_python = ">=3.6" summary = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -groups = ["default", "dev", "tests"] +groups = ["default"] marker = "platform_python_implementation == \"CPython\" and python_version < \"3.13\"" files = [ {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, @@ -1901,29 +1906,29 @@ files = [ [[package]] name = "ruff" -version = "0.5.5" +version = "0.5.6" requires_python = ">=3.7" summary = "An extremely fast Python linter and code formatter, written in Rust." 
groups = ["dev"] files = [ - {file = "ruff-0.5.5-py3-none-linux_armv6l.whl", hash = "sha256:605d589ec35d1da9213a9d4d7e7a9c761d90bba78fc8790d1c5e65026c1b9eaf"}, - {file = "ruff-0.5.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00817603822a3e42b80f7c3298c8269e09f889ee94640cd1fc7f9329788d7bf8"}, - {file = "ruff-0.5.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:187a60f555e9f865a2ff2c6984b9afeffa7158ba6e1eab56cb830404c942b0f3"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe26fc46fa8c6e0ae3f47ddccfbb136253c831c3289bba044befe68f467bfb16"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad25dd9c5faac95c8e9efb13e15803cd8bbf7f4600645a60ffe17c73f60779b"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f70737c157d7edf749bcb952d13854e8f745cec695a01bdc6e29c29c288fc36e"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:cfd7de17cef6ab559e9f5ab859f0d3296393bc78f69030967ca4d87a541b97a0"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a09b43e02f76ac0145f86a08e045e2ea452066f7ba064fd6b0cdccb486f7c3e7"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0b856cb19c60cd40198be5d8d4b556228e3dcd545b4f423d1ad812bfdca5884"}, - {file = "ruff-0.5.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3687d002f911e8a5faf977e619a034d159a8373514a587249cc00f211c67a091"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ac9dc814e510436e30d0ba535f435a7f3dc97f895f844f5b3f347ec8c228a523"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:af9bdf6c389b5add40d89b201425b531e0a5cceb3cfdcc69f04d3d531c6be74f"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d40a8533ed545390ef8315b8e25c4bb85739b90bd0f3fe1280a29ae364cc55d8"}, - {file = "ruff-0.5.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cab904683bf9e2ecbbe9ff235bfe056f0eba754d0168ad5407832928d579e7ab"}, - {file = "ruff-0.5.5-py3-none-win32.whl", hash = "sha256:696f18463b47a94575db635ebb4c178188645636f05e934fdf361b74edf1bb2d"}, - {file = "ruff-0.5.5-py3-none-win_amd64.whl", hash = "sha256:50f36d77f52d4c9c2f1361ccbfbd09099a1b2ea5d2b2222c586ab08885cf3445"}, - {file = "ruff-0.5.5-py3-none-win_arm64.whl", hash = "sha256:3191317d967af701f1b73a31ed5788795936e423b7acce82a2b63e26eb3e89d6"}, - {file = "ruff-0.5.5.tar.gz", hash = "sha256:cc5516bdb4858d972fbc31d246bdb390eab8df1a26e2353be2dbc0c2d7f5421a"}, + {file = "ruff-0.5.6-py3-none-linux_armv6l.whl", hash = "sha256:a0ef5930799a05522985b9cec8290b185952f3fcd86c1772c3bdbd732667fdcd"}, + {file = "ruff-0.5.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b652dc14f6ef5d1552821e006f747802cc32d98d5509349e168f6bf0ee9f8f42"}, + {file = "ruff-0.5.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:80521b88d26a45e871f31e4b88938fd87db7011bb961d8afd2664982dfc3641a"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9bc8f328a9f1309ae80e4d392836e7dbc77303b38ed4a7112699e63d3b066ab"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d394940f61f7720ad371ddedf14722ee1d6250fd8d020f5ea5a86e7be217daf"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111a99cdb02f69ddb2571e2756e017a1496c2c3a2aeefe7b988ddab38b416d36"}, + {file = 
"ruff-0.5.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e395daba77a79f6dc0d07311f94cc0560375ca20c06f354c7c99af3bf4560c5d"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c476acb43c3c51e3c614a2e878ee1589655fa02dab19fe2db0423a06d6a5b1b6"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2ff8003f5252fd68425fd53d27c1f08b201d7ed714bb31a55c9ac1d4c13e2eb"}, + {file = "ruff-0.5.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c94e084ba3eaa80c2172918c2ca2eb2230c3f15925f4ed8b6297260c6ef179ad"}, + {file = "ruff-0.5.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1f77c1c3aa0669fb230b06fb24ffa3e879391a3ba3f15e3d633a752da5a3e670"}, + {file = "ruff-0.5.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f908148c93c02873210a52cad75a6eda856b2cbb72250370ce3afef6fb99b1ed"}, + {file = "ruff-0.5.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:563a7ae61ad284187d3071d9041c08019975693ff655438d8d4be26e492760bd"}, + {file = "ruff-0.5.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:94fe60869bfbf0521e04fd62b74cbca21cbc5beb67cbb75ab33fe8c174f54414"}, + {file = "ruff-0.5.6-py3-none-win32.whl", hash = "sha256:e6a584c1de6f8591c2570e171cc7ce482bb983d49c70ddf014393cd39e9dfaed"}, + {file = "ruff-0.5.6-py3-none-win_amd64.whl", hash = "sha256:d7fe7dccb1a89dc66785d7aa0ac283b2269712d8ed19c63af908fdccca5ccc1a"}, + {file = "ruff-0.5.6-py3-none-win_arm64.whl", hash = "sha256:57c6c0dd997b31b536bff49b9eee5ed3194d60605a4427f735eeb1f9c1b8d264"}, + {file = "ruff-0.5.6.tar.gz", hash = "sha256:07c9e3c2a8e1fe377dd460371c3462671a728c981c3205a5217291422209f642"}, ] [[package]] @@ -1931,7 +1936,7 @@ name = "setuptools" version = "72.1.0" requires_python = ">=3.8" summary = "Easily download, build, install, upgrade, and uninstall Python packages" -groups = ["dev", "tests"] +groups = ["dev", "plot", "tests"] files = [ {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, @@ -1941,7 +1946,7 @@ files = [ name = "shexjsg" version = "0.8.2" summary = "ShExJSG - Astract Syntax Tree for the ShEx 2.0 language" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "pyjsg>=0.11.10", ] @@ -1955,7 +1960,7 @@ name = "six" version = "1.16.0" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" summary = "Python 2 and 3 compatibility utilities" -groups = ["default", "dev", "tests"] +groups = ["default", "dev", "plot", "tests"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -1965,7 +1970,7 @@ files = [ name = "sortedcontainers" version = "2.4.0" summary = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, @@ -1976,7 +1981,7 @@ name = "sparqlslurper" version = "0.5.1" requires_python = ">=3.7.4" summary = "SPARQL Slurper for rdflib" -groups = 
["default", "dev", "tests"] +groups = ["default"] dependencies = [ "rdflib-shim", "rdflib>=5.0.0", @@ -1992,7 +1997,7 @@ name = "sparqlwrapper" version = "2.0.0" requires_python = ">=3.7" summary = "SPARQL Endpoint interface to Python" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "rdflib>=6.1.1", ] @@ -2003,41 +2008,42 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.31" +version = "2.0.32" requires_python = ">=3.7" summary = "Database Abstraction Library" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "greenlet!=0.4.17; (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"", + "importlib-metadata; python_version < \"3.8\"", "typing-extensions>=4.6.0", ] files = [ - {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = "sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"}, 
- {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"}, - {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"}, - {file = "SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c9045ecc2e4db59bfc97b20516dfdf8e41d910ac6fb667ebd3a79ea54084619"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1467940318e4a860afd546ef61fefb98a14d935cd6817ed07a228c7f7c62f389"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5954463675cb15db8d4b521f3566a017c8789222b8316b1e6934c811018ee08b"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167e7497035c303ae50651b351c28dc22a40bb98fbdb8468cdc971821b1ae533"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b27dfb676ac02529fb6e343b3a482303f16e6bc3a4d868b73935b8792edb52d0"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bf2360a5e0f7bd75fa80431bf8ebcfb920c9f885e7956c7efde89031695cafb8"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-win32.whl", hash = "sha256:306fe44e754a91cd9d600a6b070c1f2fadbb4a1a257b8781ccf33c7067fd3e4d"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-win_amd64.whl", hash = "sha256:99db65e6f3ab42e06c318f15c98f59a436f1c78179e6a6f40f529c8cc7100b22"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21b053be28a8a414f2ddd401f1be8361e41032d2ef5884b2f31d31cb723e559f"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b178e875a7a25b5938b53b006598ee7645172fccafe1c291a706e93f48499ff5"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723a40ee2cc7ea653645bd4cf024326dea2076673fc9d3d33f20f6c81db83e1d"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295ff8689544f7ee7e819529633d058bd458c1fd7f7e3eebd0f9268ebc56c2a0"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49496b68cd190a147118af585173ee624114dfb2e0297558c460ad7495f9dfe2"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:acd9b73c5c15f0ec5ce18128b1fe9157ddd0044abc373e6ecd5ba376a7e5d961"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-win32.whl", 
hash = "sha256:9365a3da32dabd3e69e06b972b1ffb0c89668994c7e8e75ce21d3e5e69ddef28"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-win_amd64.whl", hash = "sha256:8bd63d051f4f313b102a2af1cbc8b80f061bf78f3d5bd0843ff70b5859e27924"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bab3db192a0c35e3c9d1560eb8332463e29e5507dbd822e29a0a3c48c0a8d92"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:19d98f4f58b13900d8dec4ed09dd09ef292208ee44cc9c2fe01c1f0a2fe440e9"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd33c61513cb1b7371fd40cf221256456d26a56284e7d19d1f0b9f1eb7dd7e8"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6ba0497c1d066dd004e0f02a92426ca2df20fac08728d03f67f6960271feec"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2b6be53e4fde0065524f1a0a7929b10e9280987b320716c1509478b712a7688c"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:916a798f62f410c0b80b63683c8061f5ebe237b0f4ad778739304253353bc1cb"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-win32.whl", hash = "sha256:31983018b74908ebc6c996a16ad3690301a23befb643093fcfe85efd292e384d"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-win_amd64.whl", hash = "sha256:4363ed245a6231f2e2957cccdda3c776265a75851f4753c60f3004b90e69bfeb"}, + {file = "SQLAlchemy-2.0.32-py3-none-any.whl", hash = "sha256:e567a8793a692451f706b363ccf3c45e056b67d90ead58c3bc9471af5d212202"}, + {file = "SQLAlchemy-2.0.32.tar.gz", hash = "sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8"}, ] [[package]] @@ -2056,7 +2062,7 @@ name = "tenacity" version = "9.0.0" requires_python = ">=3.8" summary = "Retry code until it succeeds" -groups = ["dev", "tests"] +groups = ["dev", "plot", "tests"] files = [ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, @@ -2079,7 +2085,7 @@ name = "toolz" version = "0.12.1" requires_python = ">=3.7" summary = "List processing tools and functional utilities" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"}, {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"}, @@ -2087,16 +2093,16 @@ files = [ [[package]] name = "tqdm" -version = "4.66.4" +version = "4.66.5" requires_python = ">=3.7" summary = "Fast, Extensible Progress Meter" -groups = ["default", "dev", "tests"] +groups = ["default"] dependencies = [ "colorama; platform_system == \"Windows\"", ] files = [ - {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, - {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, + {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, ] [[package]] @@ -2104,7 +2110,7 @@ name = "types-python-dateutil" version = "2.9.0.20240316" requires_python = ">=3.8" summary = "Typing stubs for python-dateutil" 
-groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"}, {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, @@ -2115,7 +2121,7 @@ name = "typing-extensions" version = "4.12.2" requires_python = ">=3.8" summary = "Backported and Experimental Type Hints for Python 3.8+" -groups = ["default", "dev", "tests"] +groups = ["default", "dev", "plot", "tests"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -2126,7 +2132,7 @@ name = "tzdata" version = "2024.1" requires_python = ">=2" summary = "Provider of IANA time zone data" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, @@ -2137,7 +2143,7 @@ name = "uri-template" version = "1.3.0" requires_python = ">=3.7" summary = "RFC 6570 URI Template Processor" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, @@ -2162,7 +2168,7 @@ name = "urllib3" version = "2.2.2" requires_python = ">=3.8" summary = "HTTP library with thread-safe connection pooling, file post, and more." 
-groups = ["default", "dev", "tests"] +groups = ["default", "dev", "plot", "tests"] files = [ {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, @@ -2173,7 +2179,7 @@ name = "watchdog" version = "4.0.1" requires_python = ">=3.8" summary = "Filesystem events monitoring" -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, @@ -2186,10 +2192,6 @@ files = [ {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, @@ -2208,7 +2210,7 @@ name = "webcolors" version = "24.6.0" requires_python = ">=3.8" summary = "A library for working with the color formats defined by HTML and CSS." -groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "webcolors-24.6.0-py3-none-any.whl", hash = "sha256:8cf5bc7e28defd1d48b9e83d5fc30741328305a8195c29a8e668fa45586568a1"}, {file = "webcolors-24.6.0.tar.gz", hash = "sha256:1d160d1de46b3e81e58d0a280d0c78b467dc80f47294b91b1ad8029d2cedb55b"}, @@ -2219,7 +2221,7 @@ name = "werkzeug" version = "3.0.3" requires_python = ">=3.8" summary = "The comprehensive WSGI web application library." -groups = ["dev", "tests"] +groups = ["dev", "plot", "tests"] dependencies = [ "MarkupSafe>=2.1.1", ] @@ -2233,7 +2235,7 @@ name = "wrapt" version = "1.16.0" requires_python = ">=3.6" summary = "Module for decorators, wrappers and monkey patching." 
-groups = ["default", "dev", "tests"] +groups = ["default"] files = [ {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, @@ -2274,7 +2276,7 @@ name = "zipp" version = "3.19.2" requires_python = ">=3.8" summary = "Backport of pathlib-compatible object wrapper for zip files" -groups = ["default", "dev", "tests"] +groups = ["default", "dev", "plot", "tests"] files = [ {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, diff --git a/nwb_linkml/pyproject.toml b/nwb_linkml/pyproject.toml index b90d92c..8455bed 100644 --- a/nwb_linkml/pyproject.toml +++ b/nwb_linkml/pyproject.toml @@ -22,7 +22,7 @@ dependencies = [ "dask>=2023.9.2", "tqdm>=4.66.1", 'typing-extensions>=4.12.2;python_version<"3.11"', - "numpydantic>=1.2.2", + "numpydantic>=1.3.0", "black>=24.4.2", "pandas>=2.2.2", ] diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index acac675..6ba881d 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Tuple, Un from linkml.generators.pydanticgen.template import Import, Imports, ObjectImport from numpydantic import NDArray -from pandas import DataFrame +from pandas import DataFrame, Series from pydantic import BaseModel, ConfigDict, Field, model_validator if TYPE_CHECKING: @@ -98,6 +98,11 @@ class DynamicTableMixin(BaseModel): rows, cols = item if isinstance(cols, (int, slice)): cols = self.colnames[cols] + + if isinstance(rows, int) and isinstance(cols, str): + # single scalar value + return self._columns[cols][rows] + data = self._slice_range(rows, cols) return DataFrame.from_dict(data) else: @@ -110,7 +115,14 @@ class DynamicTableMixin(BaseModel): cols = self.colnames elif isinstance(cols, str): cols = [cols] - + data = {} + for k in cols: + val = self._columns[k][rows] + if isinstance(val, BaseModel): + # special case where pandas will unpack a pydantic model + # into {n_fields} rows, rather than keeping it in a dict + val = Series([val]) + data[k] = val data = {k: self._columns[k][rows] for k in cols} return data @@ -244,7 +256,9 @@ class VectorIndexMixin(BaseModel): DYNAMIC_TABLE_IMPORTS = Imports( imports=[ - Import(module="pandas", objects=[ObjectImport(name="DataFrame")]), + Import( + module="pandas", objects=[ObjectImport(name="DataFrame"), ObjectImport(name="Series")] + ), Import( module="typing", objects=[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index 6fa0e8c..39b21f7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -1,14 +1,9 @@ from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -import numpy as np -from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container -from pandas import DataFrame -from typing import Any, 
ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator + +from ...hdmf_common.v1_8_0.hdmf_common_base import Data +from pandas import DataFrame, Series +from typing import Any, ClassVar, List, Dict, Optional, Union, overload, Tuple +from pydantic import BaseModel, ConfigDict, Field, RootModel, model_validator from numpydantic import NDArray, Shape metamodel_version = "None" @@ -198,6 +193,11 @@ class DynamicTableMixin(BaseModel): rows, cols = item if isinstance(cols, (int, slice)): cols = self.colnames[cols] + + if isinstance(rows, int) and isinstance(cols, str): + # single scalar value + return self._columns[cols][rows] + data = self._slice_range(rows, cols) return DataFrame.from_dict(data) else: @@ -210,8 +210,14 @@ class DynamicTableMixin(BaseModel): cols = self.colnames elif isinstance(cols, str): cols = [cols] - - data = {k: self._columns[k][rows] for k in cols} + data = {} + for k in cols: + val = self._columns[k][rows] + if isinstance(val, BaseModel): + # special case where pandas will unpack a pydantic model + # into {n_fields} rows, rather than keeping it in a dict + val = Series([val]) + data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index 73f08ef..2a3b1d0 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -5,6 +5,8 @@ import pytest # FIXME: Make this just be the output of the provider by patching into import machinery from nwb_linkml.models.pydantic.core.v2_7_0.namespace import ( + Device, + DynamicTableRegion, ElectricalSeries, ElectrodeGroup, ExtracellularEphysElectrodes, @@ -18,18 +20,95 @@ def electrical_series() -> Tuple["ElectricalSeries", "ExtracellularEphysElectrod """ n_electrodes = 5 n_times = 100 - data = np.arange(0, n_electrodes * n_times).reshape(n_times, n_electrodes) + data = np.arange(0, n_electrodes * n_times).reshape(n_times, n_electrodes).astype(float) timestamps = np.linspace(0, 1, n_times) + device = Device(name="my electrode") + # electrode group is the physical description of the electrodes electrode_group = ElectrodeGroup( name="GroupA", + device=device, + description="an electrode group", + location="you know where it is", ) # make electrodes tables electrodes = ExtracellularEphysElectrodes( + description="idk these are also electrodes", id=np.arange(0, n_electrodes), - x=np.arange(0, n_electrodes), - y=np.arange(n_electrodes, n_electrodes * 2), + x=np.arange(0, n_electrodes).astype(float), + y=np.arange(n_electrodes, n_electrodes * 2).astype(float), group=[electrode_group] * n_electrodes, + group_name=[electrode_group.name] * n_electrodes, + location=[str(i) for i in range(n_electrodes)], + extra_column=["sup"] * n_electrodes, ) + + electrical_series = ElectricalSeries( + name="my recording!", + electrodes=DynamicTableRegion( + table=electrodes, value=np.arange(0, n_electrodes), name="electrodes", description="hey" + ), + timestamps=timestamps, + data=data, + ) + return electrical_series, electrodes + + +def test_dynamictable_indexing(electrical_series): + """ + Can index values from a dynamictable + """ + series, electrodes = electrical_series + + colnames = [ + "id", + "x", + "y", + "group", + "group_name", + "location", + "extra_column", + ] + dtypes = [ + np.dtype("int64"), + np.dtype("float64"), + np.dtype("float64"), + ] + ([np.dtype("O")] * 4) + + row 
= electrodes[0] + # successfully get a single row :) + assert row.shape == (1, 7) + assert row.dtypes.values.tolist() == dtypes + assert row.columns.tolist() == colnames + + # slice a range of rows + rows = electrodes[0:3] + assert rows.shape == (3, 7) + assert rows.dtypes.values.tolist() == dtypes + assert rows.columns.tolist() == colnames + + # get a single column + col = electrodes["y"] + assert all(col == [5, 6, 7, 8, 9]) + + # get a single cell + val = electrodes[0, "y"] + assert val == 5 + val = electrodes[0, 2] + assert val == 5 + + # get a slice of rows and columns + subsection = electrodes[0:3, 0:3] + assert subsection.shape == (3, 3) + assert subsection.columns.tolist() == colnames[0:3] + assert subsection.dtypes.values.tolist() == dtypes[0:3] + + +def test_dynamictable_append_column(): + pass + + +def test_dynamictable_append_row(): + pass From 51ceb143091e656098b6e5b5ccc3a3ca3d66efde Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 5 Aug 2024 20:56:08 -0700 Subject: [PATCH 19/61] regenerate models --- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 1 - .../hdmf_common/v1_1_0/hdmf_common_table.py | 17 +- .../hdmf_common/v1_1_2/hdmf_common_table.py | 17 +- .../hdmf_common/v1_1_3/hdmf_common_table.py | 17 +- .../pydantic/hdmf_common/v1_2_0/__init__.py | 1 + .../hdmf_common/v1_2_0/hdmf_common_base.py | 88 ++++ .../hdmf_common/v1_2_0/hdmf_common_sparse.py | 133 ++++++ .../hdmf_common/v1_2_0/hdmf_common_table.py | 450 +++++++++++++++++ .../pydantic/hdmf_common/v1_2_0/namespace.py | 83 ++++ .../pydantic/hdmf_common/v1_2_1/__init__.py | 1 + .../hdmf_common/v1_2_1/hdmf_common_base.py | 104 ++++ .../hdmf_common/v1_2_1/hdmf_common_sparse.py | 134 ++++++ .../hdmf_common/v1_2_1/hdmf_common_table.py | 450 +++++++++++++++++ .../pydantic/hdmf_common/v1_2_1/namespace.py | 83 ++++ .../pydantic/hdmf_common/v1_3_0/__init__.py | 1 + .../hdmf_common/v1_3_0/hdmf_common_base.py | 104 ++++ .../v1_3_0/hdmf_common_resources.py | 177 +++++++ .../hdmf_common/v1_3_0/hdmf_common_sparse.py | 110 +++++ .../hdmf_common/v1_3_0/hdmf_common_table.py | 450 +++++++++++++++++ .../pydantic/hdmf_common/v1_3_0/namespace.py | 86 ++++ .../pydantic/hdmf_common/v1_4_0/__init__.py | 1 + .../hdmf_common/v1_4_0/hdmf_common_base.py | 104 ++++ .../hdmf_common/v1_4_0/hdmf_common_sparse.py | 110 +++++ .../hdmf_common/v1_4_0/hdmf_common_table.py | 423 ++++++++++++++++ .../pydantic/hdmf_common/v1_4_0/namespace.py | 77 +++ .../hdmf_common/v1_5_0/hdmf_common_table.py | 17 +- .../pydantic/hdmf_common/v1_5_1/__init__.py | 1 + .../hdmf_common/v1_5_1/hdmf_common_base.py | 104 ++++ .../hdmf_common/v1_5_1/hdmf_common_sparse.py | 110 +++++ .../hdmf_common/v1_5_1/hdmf_common_table.py | 452 ++++++++++++++++++ .../pydantic/hdmf_common/v1_5_1/namespace.py | 78 +++ .../pydantic/hdmf_common/v1_6_0/__init__.py | 1 + .../hdmf_common/v1_6_0/hdmf_common_base.py | 104 ++++ .../hdmf_common/v1_6_0/hdmf_common_sparse.py | 110 +++++ .../hdmf_common/v1_6_0/hdmf_common_table.py | 452 ++++++++++++++++++ .../pydantic/hdmf_common/v1_6_0/namespace.py | 78 +++ .../pydantic/hdmf_common/v1_7_0/__init__.py | 1 + .../hdmf_common/v1_7_0/hdmf_common_base.py | 104 ++++ .../hdmf_common/v1_7_0/hdmf_common_sparse.py | 110 +++++ .../hdmf_common/v1_7_0/hdmf_common_table.py | 452 ++++++++++++++++++ .../pydantic/hdmf_common/v1_7_0/namespace.py | 78 +++ .../hdmf_common/v1_8_0/hdmf_common_table.py | 13 +- .../v0_1_0/hdmf_experimental_experimental.py | 4 +- .../v0_1_0/hdmf_experimental_resources.py | 4 +- .../hdmf_experimental/v0_1_0/namespace.py | 7 +- 
.../hdmf_experimental/v0_2_0/__init__.py | 1 + .../v0_2_0/hdmf_experimental_experimental.py | 91 ++++ .../v0_2_0/hdmf_experimental_resources.py | 199 ++++++++ .../hdmf_experimental/v0_2_0/namespace.py | 89 ++++ .../hdmf_experimental/v0_3_0/__init__.py | 1 + .../v0_3_0/hdmf_experimental_experimental.py | 91 ++++ .../v0_3_0/hdmf_experimental_resources.py | 203 ++++++++ .../hdmf_experimental/v0_3_0/namespace.py | 89 ++++ .../hdmf_experimental/v0_4_0/__init__.py | 1 + .../v0_4_0/hdmf_experimental_experimental.py | 91 ++++ .../v0_4_0/hdmf_experimental_resources.py | 225 +++++++++ .../hdmf_experimental/v0_4_0/namespace.py | 90 ++++ .../v1_1_0/hdmf-common.nwb.language.yaml | 2 +- .../v1_1_2/hdmf-common.nwb.language.yaml | 2 +- .../v1_1_3/hdmf-common.nwb.language.yaml | 2 +- .../hdmf_common/v1_2_0/hdmf-common.base.yaml | 33 ++ .../v1_2_0/hdmf-common.nwb.language.yaml | 94 ++++ .../v1_2_0/hdmf-common.sparse.yaml | 81 ++++ .../hdmf_common/v1_2_0/hdmf-common.table.yaml | 193 ++++++++ .../linkml/hdmf_common/v1_2_0/namespace.yaml | 17 + .../hdmf_common/v1_2_1/hdmf-common.base.yaml | 46 ++ .../v1_2_1/hdmf-common.nwb.language.yaml | 94 ++++ .../v1_2_1/hdmf-common.sparse.yaml | 83 ++++ .../hdmf_common/v1_2_1/hdmf-common.table.yaml | 193 ++++++++ .../linkml/hdmf_common/v1_2_1/namespace.yaml | 17 + .../hdmf_common/v1_3_0/hdmf-common.base.yaml | 46 ++ .../v1_3_0/hdmf-common.nwb.language.yaml | 94 ++++ .../v1_3_0/hdmf-common.resources.yaml | 158 ++++++ .../v1_3_0/hdmf-common.sparse.yaml | 68 +++ .../hdmf_common/v1_3_0/hdmf-common.table.yaml | 193 ++++++++ .../linkml/hdmf_common/v1_3_0/namespace.yaml | 18 + .../hdmf_common/v1_4_0/hdmf-common.base.yaml | 46 ++ .../v1_4_0/hdmf-common.nwb.language.yaml | 94 ++++ .../v1_4_0/hdmf-common.sparse.yaml | 68 +++ .../hdmf_common/v1_4_0/hdmf-common.table.yaml | 173 +++++++ .../linkml/hdmf_common/v1_4_0/namespace.yaml | 17 + .../v1_5_0/hdmf-common.nwb.language.yaml | 2 +- .../hdmf_common/v1_5_1/hdmf-common.base.yaml | 46 ++ .../v1_5_1/hdmf-common.nwb.language.yaml | 94 ++++ .../v1_5_1/hdmf-common.sparse.yaml | 68 +++ .../hdmf_common/v1_5_1/hdmf-common.table.yaml | 192 ++++++++ .../linkml/hdmf_common/v1_5_1/namespace.yaml | 17 + .../hdmf_common/v1_6_0/hdmf-common.base.yaml | 46 ++ .../v1_6_0/hdmf-common.nwb.language.yaml | 94 ++++ .../v1_6_0/hdmf-common.sparse.yaml | 68 +++ .../hdmf_common/v1_6_0/hdmf-common.table.yaml | 192 ++++++++ .../linkml/hdmf_common/v1_6_0/namespace.yaml | 17 + .../hdmf_common/v1_7_0/hdmf-common.base.yaml | 46 ++ .../v1_7_0/hdmf-common.nwb.language.yaml | 94 ++++ .../v1_7_0/hdmf-common.sparse.yaml | 68 +++ .../hdmf_common/v1_7_0/hdmf-common.table.yaml | 192 ++++++++ .../linkml/hdmf_common/v1_7_0/namespace.yaml | 17 + .../v1_8_0/hdmf-common.nwb.language.yaml | 2 +- .../hdmf-experimental.experimental.yaml | 2 +- .../hdmf-experimental.nwb.language.yaml | 2 +- .../v0_1_0/hdmf-experimental.resources.yaml | 2 +- .../hdmf-experimental.experimental.yaml | 32 ++ .../hdmf-experimental.nwb.language.yaml | 94 ++++ .../v0_2_0/hdmf-experimental.resources.yaml | 196 ++++++++ .../hdmf_experimental/v0_2_0/namespace.yaml | 17 + .../hdmf-experimental.experimental.yaml | 32 ++ .../hdmf-experimental.nwb.language.yaml | 94 ++++ .../v0_3_0/hdmf-experimental.resources.yaml | 199 ++++++++ .../hdmf_experimental/v0_3_0/namespace.yaml | 17 + .../hdmf-experimental.experimental.yaml | 32 ++ .../hdmf-experimental.nwb.language.yaml | 94 ++++ .../v0_4_0/hdmf-experimental.resources.yaml | 222 +++++++++ .../hdmf_experimental/v0_4_0/namespace.yaml | 17 + 
.../hdmf-experimental.nwb.language.yaml | 2 +- 114 files changed, 10780 insertions(+), 34 deletions(-) create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py create mode 100644 
nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py create mode 100644 nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml create mode 100644 
nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml create mode 100644 nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 6ba881d..e4534de 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -123,7 +123,6 @@ class DynamicTableMixin(BaseModel): # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) data[k] = val - data = {k: self._columns[k][rows] for k in cols} return data def __setitem__(self, key: str, value: Any) -> None: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index 0647a30..874bfe5 
100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys import numpy as np -from pandas import DataFrame +from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple from numpydantic import NDArray, Shape from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator @@ -197,6 +197,11 @@ class DynamicTableMixin(BaseModel): rows, cols = item if isinstance(cols, (int, slice)): cols = self.colnames[cols] + + if isinstance(rows, int) and isinstance(cols, str): + # single scalar value + return self._columns[cols][rows] + data = self._slice_range(rows, cols) return DataFrame.from_dict(data) else: @@ -209,8 +214,14 @@ class DynamicTableMixin(BaseModel): cols = self.colnames elif isinstance(cols, str): cols = [cols] - - data = {k: self._columns[k][rows] for k in cols} + data = {} + for k in cols: + val = self._columns[k][rows] + if isinstance(val, BaseModel): + # special case where pandas will unpack a pydantic model + # into {n_fields} rows, rather than keeping it in a dict + val = Series([val]) + data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 21fc9c0..2818a13 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys import numpy as np -from pandas import DataFrame +from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple from numpydantic import NDArray, Shape from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator @@ -197,6 +197,11 @@ class DynamicTableMixin(BaseModel): rows, cols = item if isinstance(cols, (int, slice)): cols = self.colnames[cols] + + if isinstance(rows, int) and isinstance(cols, str): + # single scalar value + return self._columns[cols][rows] + data = self._slice_range(rows, cols) return DataFrame.from_dict(data) else: @@ -209,8 +214,14 @@ class DynamicTableMixin(BaseModel): cols = self.colnames elif isinstance(cols, str): cols = [cols] - - data = {k: self._columns[k][rows] for k in cols} + data = {} + for k in cols: + val = self._columns[k][rows] + if isinstance(val, BaseModel): + # special case where pandas will unpack a pydantic model + # into {n_fields} rows, rather than keeping it in a dict + val = Series([val]) + data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index 0a4ed6e..12f24d7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys import numpy as np -from pandas import DataFrame +from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, 
Tuple from numpydantic import NDArray, Shape from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator @@ -197,6 +197,11 @@ class DynamicTableMixin(BaseModel): rows, cols = item if isinstance(cols, (int, slice)): cols = self.colnames[cols] + + if isinstance(rows, int) and isinstance(cols, str): + # single scalar value + return self._columns[cols][rows] + data = self._slice_range(rows, cols) return DataFrame.from_dict(data) else: @@ -209,8 +214,14 @@ class DynamicTableMixin(BaseModel): cols = self.colnames elif isinstance(cols, str): cols = [cols] - - data = {k: self._columns[k][rows] for k in cols} + data = {} + for k in cols: + val = self._columns[k][rows] + if isinstance(val, BaseModel): + # special case where pandas will unpack a pydantic model + # into {n_fields} rows, rather than keeping it in a dict + val = Series([val]) + data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py new file mode 100644 index 0000000..1d657d9 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py @@ -0,0 +1,88 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.2.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.base/", + "id": "hdmf-common.base", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.base", + } +) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py new file mode 100644 index 0000000..6cf3e21 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py @@ -0,0 +1,133 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.2.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.sparse/", + "id": "hdmf-common.sparse", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.sparse", + } +) + + +class CSRMatrix(ConfiguredBaseModel): + """ + a compressed sparse row matrix + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.sparse", "tree_root": True} + ) + + name: str = Field(...) 
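+    # Editor's note, an illustrative sketch (values hypothetical, not part of the
+    # generated model) of the CSR layout that the fields below encode: for the matrix
+    #     [[1, 0, 2],
+    #      [0, 0, 3]]
+    # data == [1, 2, 3], indices == [0, 2, 2] (the column of each stored value), and
+    # indptr == [0, 2, 3], so row i's values live at data[indptr[i]:indptr[i + 1]].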
+ shape: NDArray[Shape["2 null"], int] = Field( + ..., + description="""the shape of this sparse matrix""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "null", "exact_cardinality": 2}]}} + }, + ) + indices: CSRMatrixIndices = Field(..., description="""column indices""") + indptr: CSRMatrixIndptr = Field(..., description="""index pointer""") + data: CSRMatrixData = Field(..., description="""values in the matrix""") + + +class CSRMatrixIndices(ConfiguredBaseModel): + """ + column indices + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["indices"] = Field( + "indices", + json_schema_extra={ + "linkml_meta": {"equals_string": "indices", "ifabsent": "string(indices)"} + }, + ) + + +class CSRMatrixIndptr(ConfiguredBaseModel): + """ + index pointer + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["indptr"] = Field( + "indptr", + json_schema_extra={ + "linkml_meta": {"equals_string": "indptr", "ifabsent": "string(indptr)"} + }, + ) + + +class CSRMatrixData(ConfiguredBaseModel): + """ + values in the matrix + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +CSRMatrix.model_rebuild() +CSRMatrixIndices.model_rebuild() +CSRMatrixIndptr.model_rebuild() +CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py new file mode 100644 index 0000000..e7d72e4 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -0,0 +1,450 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +import numpy as np +from ...hdmf_common.v1_2_0.hdmf_common_base import Data, Container +from pandas import DataFrame, Series +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from numpydantic import NDArray, Shape +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator + +metamodel_version = "None" +version = "1.2.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included 
here for testing and type checking
+    value: Optional[NDArray] = None
+
+    def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            return self._index[item]
+        else:
+            return self.value[item]
+
+    def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            self._index[key] = value
+        else:
+            self.value[key] = value
+
+
+class VectorIndexMixin(BaseModel):
+    """
+    Mixin class to give VectorIndex indexing abilities
+    """
+
+    # redefined in `VectorData`, but included here for testing and type checking
+    value: Optional[NDArray] = None
+    target: Optional["VectorData"] = None
+
+    def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
+        """
+        Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+        """
+
+        start = 0 if arg == 0 else self.value[arg - 1]
+        end = self.value[arg]
+        # VectorData stores its data in ``value``
+        return self.target.value[slice(start, end)]
+
+    def __getitem__(self, item: Union[int, slice]) -> Any:
+        if self.target is None:
+            return self.value[item]
+        elif type(self.target).__name__ == "VectorData":
+            if isinstance(item, int):
+                return self._getitem_helper(item)
+            else:
+                idx = range(*item.indices(len(self.value)))
+                return [self._getitem_helper(i) for i in idx]
+        else:
+            raise NotImplementedError("DynamicTableRange not supported yet")
+
+    def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+        if self.target is None:
+            self.value[key] = value
+        else:
+            # this mixin has no ``_index`` attribute, so the guard that was copied here
+            # from VectorDataMixin could never work; ragged assignment would have to be
+            # translated through the index, which is not implemented yet
+            raise NotImplementedError("Setting ragged values through a VectorIndex is not supported yet")
+
+
+class DynamicTableMixin(BaseModel):
+    """
+    Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+    Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+    but simplifying along the way :)
+    """
+
+    model_config = ConfigDict(extra="allow")
+    __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+        "name",
+        "colnames",
+        "description",
+    )
+
+    # overridden by subclass but implemented here for testing and typechecking purposes :)
+    colnames: List[str] = Field(default_factory=list)
+
+    @property
+    def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+    @property
+    def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]:
+        return [getattr(self, k) for k in self.colnames]
+
+    @overload
+    def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+    @overload
+    def __getitem__(self, item: int) -> DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+        DataFrame,
+        list,
+        "NDArray",
+        "VectorDataMixin",
+    ]: ...
+
+    @overload
+    def __getitem__(self, item: slice) -> DataFrame: ...
+
+    def __getitem__(
+        self,
+        item: Union[
+            str,
+            int,
+            slice,
+            Tuple[int, Union[int, str]],
+            Tuple[Union[int, slice], ...],
+        ],
+    ) -> Any:
+        """
+        Get an item from the table
+
+        If item is...
+
+        - ``str`` : get the column with this name
+        - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value eg.
(0, 'colname')
+          gets the 0th row from ``colname``
+        - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+          returns as a :class:`pandas.DataFrame`
+        """
+        if isinstance(item, str):
+            return self._columns[item]
+        if isinstance(item, (int, slice)):
+            return DataFrame.from_dict(self._slice_range(item))
+        elif isinstance(item, tuple):
+            if len(item) != 2:
+                raise ValueError(
+                    "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+                    f" {item}"
+                )
+
+            # all other cases are tuples of (rows, cols)
+            rows, cols = item
+            if isinstance(cols, (int, slice)):
+                cols = self.colnames[cols]
+
+            if isinstance(rows, int) and isinstance(cols, str):
+                # single scalar value
+                return self._columns[cols][rows]
+
+            data = self._slice_range(rows, cols)
+            return DataFrame.from_dict(data)
+        else:
+            raise ValueError(f"Unsure how to get item with key {item}")
+
+    def _slice_range(
+        self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
+    ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        if cols is None:
+            cols = self.colnames
+        elif isinstance(cols, str):
+            cols = [cols]
+        data = {}
+        for k in cols:
+            val = self._columns[k][rows]
+            if isinstance(val, BaseModel):
+                # special case where pandas will unpack a pydantic model
+                # into {n_fields} rows, rather than keeping it in a dict
+                val = Series([val])
+            data[k] = val
+        return data
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        raise NotImplementedError("TODO")
+
+    def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+        """
+        Add a column, appending it to ``colnames``
+        """
+        # don't use this while building the model
+        if not getattr(self, "__pydantic_complete__", False):
+            return super().__setattr__(key, value)
+
+        if key not in self.model_fields_set and not key.endswith("_index"):
+            self.colnames.append(key)
+
+        return super().__setattr__(key, value)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_colnames(cls, model: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Construct colnames from arguments.
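+
+        For example (hypothetical column names), ``MyTable(name="t", description="d",
+        x=[1, 2], y=[3, 4])`` implies ``colnames == ["x", "y"]``, while ``x_index``-style
+        keys and the fields in :attr:`.NON_COLUMN_FIELDS` are skipped.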
+ + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "colnames" not in model: + colnames = [ + k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + ] + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + ] + model["colnames"].extend(colnames) + return model + + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._columns.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.table/", + "id": "hdmf-common.table", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.table", + } +) + + +class VectorData(VectorDataMixin): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorIndexMixin): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) 
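+    # Editor's note, an illustrative sketch (values hypothetical): following
+    # ``VectorIndexMixin._getitem_helper`` above, row i of the index spans
+    # ``target[value[i - 1]:value[i]]`` (from 0 for the first row). So with
+    # target.value == ["a", "b", "c", "d", "e"] and value == [2, 5],
+    # index[0] == ["a", "b"] and index[1] == ["c", "d", "e"].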
+    target: VectorData = Field(
+        ..., description="""Reference to the target dataset that this index applies to."""
+    )
+    description: str = Field(..., description="""Description of what these vectors represent.""")
+    value: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class ElementIdentifiers(Data):
+    """
+    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(
+        "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
+    )
+
+
+class DynamicTableRegion(VectorData):
+    """
+    DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(...)
+    table: DynamicTable = Field(
+        ..., description="""Reference to the DynamicTable object that this region applies to."""
+    )
+    description: str = Field(
+        ..., description="""Description of what this table region points to."""
+    )
+    value: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class VocabData(VectorData):
+    """
+    Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(...)
+    vocabulary: NDArray[Shape["* null"], str] = Field(
+        ...,
+        description="""The available items in the controlled vocabulary.""",
+        json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "null"}]}}},
+    )
+    description: str = Field(..., description="""Description of what these vectors represent.""")
+    value: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+    """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size.
To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + colnames: List[str] = Field( + ..., + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +VocabData.model_rebuild() +DynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py new file mode 100644 index 0000000..62d22cb --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py @@ -0,0 +1,83 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_2_0.hdmf_common_sparse import ( + CSRMatrix, + CSRMatrixIndices, + CSRMatrixIndptr, + CSRMatrixData, +) +from ...hdmf_common.v1_2_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + VocabData, + DynamicTable, +) +from ...hdmf_common.v1_2_0.hdmf_common_base import Data, Container + +metamodel_version = "None" +version = "1.2.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = 
ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common/", + "description": "Common data structures provided by HDMF", + "id": "hdmf-common", + "imports": [ + "hdmf-common.base", + "hdmf-common.table", + "hdmf-common.sparse", + "hdmf-common.nwb.language", + ], + "name": "hdmf-common", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py new file mode 100644 index 0000000..e75baf7 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py @@ -0,0 +1,104 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.2.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.base/", + "id": "hdmf-common.base", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.base", + } +) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) 
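+
+# Editor's note, an illustrative sketch (names hypothetical): the SimpleMultiContainer
+# defined below simply nests other containers by value, e.g.
+#   box = SimpleMultiContainer(name="box", value=[Container(name="a"), Container(name="b")])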
+ + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + value: Optional[List[Container]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} + ) + name: str = Field(...) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() +SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py new file mode 100644 index 0000000..62bc6ef --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py @@ -0,0 +1,134 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_2_1.hdmf_common_base import Container +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.2.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.sparse/", + "id": "hdmf-common.sparse", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.sparse", + } +) + + +class CSRMatrix(Container): + """ + a compressed sparse row matrix + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.sparse", "tree_root": True} + ) + + name: str = Field(...) 
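+    # Illustrative sketch (assumed example values, not part of the generated
+    # model): in CSR form the 2x3 matrix [[1, 0, 2], [0, 0, 3]] would be stored as
+    #   data    = [1, 2, 3]
+    #   indices = [0, 2, 2]  # column index of each non-zero value
+    #   indptr  = [0, 2, 3]  # row i spans data[indptr[i]:indptr[i+1]]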
+ shape: NDArray[Shape["2 null"], int] = Field( + ..., + description="""the shape of this sparse matrix""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "null", "exact_cardinality": 2}]}} + }, + ) + indices: CSRMatrixIndices = Field(..., description="""column indices""") + indptr: CSRMatrixIndptr = Field(..., description="""index pointer""") + data: CSRMatrixData = Field(..., description="""values in the matrix""") + + +class CSRMatrixIndices(ConfiguredBaseModel): + """ + column indices + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["indices"] = Field( + "indices", + json_schema_extra={ + "linkml_meta": {"equals_string": "indices", "ifabsent": "string(indices)"} + }, + ) + + +class CSRMatrixIndptr(ConfiguredBaseModel): + """ + index pointer + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["indptr"] = Field( + "indptr", + json_schema_extra={ + "linkml_meta": {"equals_string": "indptr", "ifabsent": "string(indptr)"} + }, + ) + + +class CSRMatrixData(ConfiguredBaseModel): + """ + values in the matrix + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +CSRMatrix.model_rebuild() +CSRMatrixIndices.model_rebuild() +CSRMatrixIndptr.model_rebuild() +CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py new file mode 100644 index 0000000..86db0e3 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -0,0 +1,450 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +import numpy as np +from ...hdmf_common.v1_2_1.hdmf_common_base import Data, Container +from pandas import DataFrame, Series +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from numpydantic import NDArray, Shape +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator + +metamodel_version = "None" +version = "1.2.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included 
here for testing and type checking
+    value: Optional[NDArray] = None
+
+    def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            return self._index[item]
+        else:
+            return self.value[item]
+
+    def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            self._index[key] = value
+        else:
+            self.value[key] = value
+
+
+class VectorIndexMixin(BaseModel):
+    """
+    Mixin class to give VectorIndex indexing abilities
+    """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+    value: Optional[NDArray] = None
+    target: Optional["VectorData"] = None
+
+    def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
+        """
+        Mimicking :meth:`hdmf.common.table.VectorIndex.__getitem_helper`
+        """
+
+        start = 0 if arg == 0 else self.value[arg - 1]
+        end = self.value[arg]
+        # the target VectorData stores its contents in ``value``
+        return self.target.value[slice(start, end)]
+
+    def __getitem__(self, item: Union[int, slice]) -> Any:
+        if self.target is None:
+            return self.value[item]
+        elif type(self.target).__name__ == "VectorData":
+            if isinstance(item, int):
+                return self._getitem_helper(item)
+            else:
+                idx = range(*item.indices(len(self.value)))
+                return [self._getitem_helper(i) for i in idx]
+        else:
+            raise NotImplementedError("DynamicTableRegion not supported yet")
+
+    def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+        # a VectorIndex has no index of its own, so assignment writes directly
+        # to the stored index values
+        self.value[key] = value
+
+
+class DynamicTableMixin(BaseModel):
+    """
+    Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+    Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+    but simplifying along the way :)
+    """
+
+    model_config = ConfigDict(extra="allow")
+    __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+        "name",
+        "colnames",
+        "description",
+    )
+
+    # overridden by subclass but implemented here for testing and type checking purposes :)
+    colnames: List[str] = Field(default_factory=list)
+
+    @property
+    def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+    @property
+    def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]:
+        return [getattr(self, k) for k in self.colnames]
+
+    @overload
+    def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+    @overload
+    def __getitem__(self, item: int) -> DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+        DataFrame,
+        list,
+        "NDArray",
+        "VectorDataMixin",
+    ]: ...
+
+    @overload
+    def __getitem__(self, item: slice) -> DataFrame: ...
+
+    def __getitem__(
+        self,
+        item: Union[
+            str,
+            int,
+            slice,
+            Tuple[int, Union[int, str]],
+            Tuple[Union[int, slice], ...],
+        ],
+    ) -> Any:
+        """
+        Get an item from the table
+
+        If item is...
+
+        - ``str`` : get the column with this name
+        - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value, e.g. (0,1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value, e.g.
(0, 'colname')
+          gets the 0th row from ``colname``
+        - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+          returns as a :class:`pandas.DataFrame`
+        """
+        if isinstance(item, str):
+            return self._columns[item]
+        if isinstance(item, (int, slice)):
+            return DataFrame.from_dict(self._slice_range(item))
+        elif isinstance(item, tuple):
+            if len(item) != 2:
+                raise ValueError(
+                    "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+                    f" {item}"
+                )
+
+            # all other cases are tuples of (rows, cols)
+            rows, cols = item
+            if isinstance(cols, (int, slice)):
+                cols = self.colnames[cols]
+
+            if isinstance(rows, int) and isinstance(cols, str):
+                # single scalar value
+                return self._columns[cols][rows]
+
+            data = self._slice_range(rows, cols)
+            return DataFrame.from_dict(data)
+        else:
+            raise ValueError(f"Unsure how to get item with key {item}")
+
+    def _slice_range(
+        self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
+    ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        if cols is None:
+            cols = self.colnames
+        elif isinstance(cols, str):
+            cols = [cols]
+        data = {}
+        for k in cols:
+            val = self._columns[k][rows]
+            if isinstance(val, BaseModel):
+                # special case where pandas will unpack a pydantic model
+                # into {n_fields} rows, rather than keeping it in a dict
+                val = Series([val])
+            data[k] = val
+        return data
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        raise NotImplementedError("TODO")
+
+    def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+        """
+        Add a column, appending it to ``colnames``
+        """
+        # don't use this while building the model
+        if not getattr(self, "__pydantic_complete__", False):
+            return super().__setattr__(key, value)
+
+        if key not in self.model_fields_set and not key.endswith("_index"):
+            self.colnames.append(key)
+
+        return super().__setattr__(key, value)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_colnames(cls, model: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Construct colnames from arguments.
+ + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "colnames" not in model: + colnames = [ + k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + ] + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + ] + model["colnames"].extend(colnames) + return model + + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._columns.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.table/", + "id": "hdmf-common.table", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.table", + } +) + + +class VectorData(VectorDataMixin): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorIndexMixin): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) 
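+    # Hypothetical usage sketch (names and values are assumptions, not part of
+    # the generated model): given
+    #   letters = VectorData(name="letters", description="demo", value=np.array(["a", "b", "c"]))
+    #   index = VectorIndex(name="letters_index", description="demo", target=letters, value=np.array([2, 3]))
+    # ``index[0]`` slices letters.value[0:2] -> ["a", "b"] and ``index[1]``
+    # slices letters.value[2:3] -> ["c"], following the ragged-array convention
+    # described above.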
+    target: VectorData = Field(
+        ..., description="""Reference to the target dataset that this index applies to."""
+    )
+    description: str = Field(..., description="""Description of what these vectors represent.""")
+    value: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class ElementIdentifiers(Data):
+    """
+    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(
+        "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
+    )
+
+
+class DynamicTableRegion(VectorData):
+    """
+    DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(...)
+    table: DynamicTable = Field(
+        ..., description="""Reference to the DynamicTable object that this region applies to."""
+    )
+    description: str = Field(
+        ..., description="""Description of what this table region points to."""
+    )
+    value: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class VocabData(VectorData):
+    """
+    Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(...)
+    vocabulary: NDArray[Shape["* null"], str] = Field(
+        ...,
+        description="""The available items in the controlled vocabulary.""",
+        json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "null"}]}}},
+    )
+    description: str = Field(..., description="""Description of what these vectors represent.""")
+    value: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+    """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size.
To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + colnames: List[str] = Field( + ..., + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +VocabData.model_rebuild() +DynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py new file mode 100644 index 0000000..55f5dc6 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py @@ -0,0 +1,83 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_2_1.hdmf_common_sparse import ( + CSRMatrix, + CSRMatrixIndices, + CSRMatrixIndptr, + CSRMatrixData, +) +from ...hdmf_common.v1_2_1.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_2_1.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + VocabData, + DynamicTable, +) + +metamodel_version = "None" +version = "1.2.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + 
model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common/", + "description": "Common data structures provided by HDMF", + "id": "hdmf-common", + "imports": [ + "hdmf-common.base", + "hdmf-common.table", + "hdmf-common.sparse", + "hdmf-common.nwb.language", + ], + "name": "hdmf-common", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py new file mode 100644 index 0000000..83003ce --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py @@ -0,0 +1,104 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.base/", + "id": "hdmf-common.base", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.base", + } +) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + value: Optional[List[Container]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} + ) + name: str = Field(...) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() +SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py new file mode 100644 index 0000000..00eaee4 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py @@ -0,0 +1,177 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_3_0.hdmf_common_base import Container, Data + +metamodel_version = "None" +version = "1.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.resources/", + "id": "hdmf-common.resources", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.resources", + } +) + + +class ExternalResources(Container): + """ + A set of four tables for tracking external resource references in a file. NOTE: this data type is in beta testing and is subject to change in a later version. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.resources", "tree_root": True} + ) + + name: str = Field(...) 
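+    # Illustrative note (an assumption about usage, not schema-generated): the
+    # four sub-tables join on row indices, e.g. ``resources.keytable_idx``
+    # points at a row of ``keys``, and ``object_keys`` pairs ``objects`` rows
+    # with ``keys`` rows via ``objecttable_idx`` / ``keytable_idx``.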
+ keys: ExternalResourcesKeys = Field( + ..., + description="""A table for storing user terms that are used to refer to external resources.""", + ) + resources: ExternalResourcesResources = Field( + ..., description="""A table for mapping user terms (i.e., keys) to resource entities.""" + ) + objects: ExternalResourcesObjects = Field( + ..., + description="""A table for identifying which objects in a file contain references to external resources.""", + ) + object_keys: ExternalResourcesObjectKeys = Field( + ..., description="""A table for identifying which objects use which keys.""" + ) + + +class ExternalResourcesKeys(Data): + """ + A table for storing user terms that are used to refer to external resources. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"}) + + name: Literal["keys"] = Field( + "keys", + json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, + ) + key_name: str = Field( + ..., + description="""The user term that maps to one or more resources in the 'resources' table.""", + ) + + +class ExternalResourcesResources(Data): + """ + A table for mapping user terms (i.e., keys) to resource entities. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"}) + + name: Literal["resources"] = Field( + "resources", + json_schema_extra={ + "linkml_meta": {"equals_string": "resources", "ifabsent": "string(resources)"} + }, + ) + keytable_idx: int = Field(..., description="""The index to the key in the 'keys' table.""") + resource_name: str = Field( + ..., + description="""The name of the online resource (e.g., website, database) that has the entity.""", + ) + resource_id: str = Field( + ..., description="""The unique identifier for the resource entity at the resource.""" + ) + uri: str = Field( + ..., + description="""The URI for the resource entity this reference applies to. This can be an empty string.""", + ) + + +class ExternalResourcesObjects(Data): + """ + A table for identifying which objects in a file contain references to external resources. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"}) + + name: Literal["objects"] = Field( + "objects", + json_schema_extra={ + "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} + }, + ) + object_id: str = Field(..., description="""The UUID for the object.""") + field: str = Field( + ..., + description="""The field of the object. This can be an empty string if the object is a dataset and the field is the dataset values.""", + ) + + +class ExternalResourcesObjectKeys(Data): + """ + A table for identifying which objects use which keys. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.resources"}) + + name: Literal["object_keys"] = Field( + "object_keys", + json_schema_extra={ + "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} + }, + ) + objecttable_idx: int = Field( + ..., description="""The index to the 'objects' table for the object that holds the key.""" + ) + keytable_idx: int = Field(..., description="""The index to the 'keys' table for the key.""") + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +ExternalResources.model_rebuild() +ExternalResourcesKeys.model_rebuild() +ExternalResourcesResources.model_rebuild() +ExternalResourcesObjects.model_rebuild() +ExternalResourcesObjectKeys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py new file mode 100644 index 0000000..3d4d4af --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py @@ -0,0 +1,110 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_3_0.hdmf_common_base import Container +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.sparse/", + "id": "hdmf-common.sparse", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.sparse", + } +) + + +class CSRMatrix(Container): + """ + A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.sparse", "tree_root": True} + ) + + name: str = Field(...) 
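+    # Illustrative note (assumption, not part of the generated model): these
+    # fields mirror the layout scipy uses, so a populated instance could be
+    # materialized as scipy.sparse.csr_matrix((data, indices, indptr), shape=shape).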
+ shape: List[int] = Field( + ..., description="""The shape (number of rows, number of columns) of this sparse matrix.""" + ) + indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field( + ..., + description="""The column indices.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} + }, + ) + indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field( + ..., + description="""The row index pointer.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} + }, + ) + data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") + + +class CSRMatrixData(ConfiguredBaseModel): + """ + The non-zero values in the matrix. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +CSRMatrix.model_rebuild() +CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py new file mode 100644 index 0000000..d221165 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -0,0 +1,450 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +import numpy as np +from ...hdmf_common.v1_3_0.hdmf_common_base import Data, Container +from pandas import DataFrame, Series +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from numpydantic import NDArray, Shape +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator + +metamodel_version = "None" +version = "1.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.value[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + if self._index: + # Following hdmf, VectorIndex is 
the thing that knows how to do the slicing
+            self._index[key] = value
+        else:
+            self.value[key] = value
+
+
+class VectorIndexMixin(BaseModel):
+    """
+    Mixin class to give VectorIndex indexing abilities
+    """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+    value: Optional[NDArray] = None
+    target: Optional["VectorData"] = None
+
+    def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
+        """
+        Mimicking :meth:`hdmf.common.table.VectorIndex.__getitem_helper`
+        """
+
+        start = 0 if arg == 0 else self.value[arg - 1]
+        end = self.value[arg]
+        # the target VectorData stores its contents in ``value``
+        return self.target.value[slice(start, end)]
+
+    def __getitem__(self, item: Union[int, slice]) -> Any:
+        if self.target is None:
+            return self.value[item]
+        elif type(self.target).__name__ == "VectorData":
+            if isinstance(item, int):
+                return self._getitem_helper(item)
+            else:
+                idx = range(*item.indices(len(self.value)))
+                return [self._getitem_helper(i) for i in idx]
+        else:
+            raise NotImplementedError("DynamicTableRegion not supported yet")
+
+    def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+        # a VectorIndex has no index of its own, so assignment writes directly
+        # to the stored index values
+        self.value[key] = value
+
+
+class DynamicTableMixin(BaseModel):
+    """
+    Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+    Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+    but simplifying along the way :)
+    """
+
+    model_config = ConfigDict(extra="allow")
+    __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+        "name",
+        "colnames",
+        "description",
+    )
+
+    # overridden by subclass but implemented here for testing and type checking purposes :)
+    colnames: List[str] = Field(default_factory=list)
+
+    @property
+    def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+    @property
+    def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]:
+        return [getattr(self, k) for k in self.colnames]
+
+    @overload
+    def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+    @overload
+    def __getitem__(self, item: int) -> DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+        DataFrame,
+        list,
+        "NDArray",
+        "VectorDataMixin",
+    ]: ...
+
+    @overload
+    def __getitem__(self, item: slice) -> DataFrame: ...
+
+    def __getitem__(
+        self,
+        item: Union[
+            str,
+            int,
+            slice,
+            Tuple[int, Union[int, str]],
+            Tuple[Union[int, slice], ...],
+        ],
+    ) -> Any:
+        """
+        Get an item from the table
+
+        If item is...
+
+        - ``str`` : get the column with this name
+        - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value, e.g. (0,1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value, e.g. (0, 'colname')
+          gets the 0th row from ``colname``
+        - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+          returns as a :class:`pandas.DataFrame`
+        """
+        if isinstance(item, str):
+            return self._columns[item]
+        if isinstance(item, (int, slice)):
+            return DataFrame.from_dict(self._slice_range(item))
+        elif isinstance(item, tuple):
+            if len(item) != 2:
+                raise ValueError(
+                    "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+                    f" {item}"
+                )
+
+            # all other cases are tuples of (rows, cols)
+            rows, cols = item
+            if isinstance(cols, (int, slice)):
+                cols = self.colnames[cols]
+
+            if isinstance(rows, int) and isinstance(cols, str):
+                # single scalar value
+                return self._columns[cols][rows]
+
+            data = self._slice_range(rows, cols)
+            return DataFrame.from_dict(data)
+        else:
+            raise ValueError(f"Unsure how to get item with key {item}")
+
+    def _slice_range(
+        self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
+    ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        if cols is None:
+            cols = self.colnames
+        elif isinstance(cols, str):
+            cols = [cols]
+        data = {}
+        for k in cols:
+            val = self._columns[k][rows]
+            if isinstance(val, BaseModel):
+                # special case where pandas will unpack a pydantic model
+                # into {n_fields} rows, rather than keeping it in a dict
+                val = Series([val])
+            data[k] = val
+        return data
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        raise NotImplementedError("TODO")
+
+    def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+        """
+        Add a column, appending it to ``colnames``
+        """
+        # don't use this while building the model
+        if not getattr(self, "__pydantic_complete__", False):
+            return super().__setattr__(key, value)
+
+        if key not in self.model_fields_set and not key.endswith("_index"):
+            self.colnames.append(key)
+
+        return super().__setattr__(key, value)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_colnames(cls, model: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Construct colnames from arguments.
+ + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "colnames" not in model: + colnames = [ + k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + ] + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + ] + model["colnames"].extend(colnames) + return model + + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._columns.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.table/", + "id": "hdmf-common.table", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.table", + } +) + + +class VectorData(VectorDataMixin): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorIndexMixin): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) 
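+    # Sketch of the implicit-index convention handled by the mixins above (an
+    # illustration, not generated code): DynamicTableMixin.resolve_targets pairs
+    # a column ``col`` with a VectorIndex named ``col_index`` (or one whose
+    # ``target`` is already ``col``), setting ``col._index`` and
+    # ``index.target`` so ragged columns can be sliced transparently.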
+    target: VectorData = Field(
+        ..., description="""Reference to the target dataset that this index applies to."""
+    )
+    description: str = Field(..., description="""Description of what these vectors represent.""")
+    value: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class ElementIdentifiers(Data):
+    """
+    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(
+        "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
+    )
+
+
+class DynamicTableRegion(VectorData):
+    """
+    DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(...)
+    table: DynamicTable = Field(
+        ..., description="""Reference to the DynamicTable object that this region applies to."""
+    )
+    description: str = Field(
+        ..., description="""Description of what this table region points to."""
+    )
+    value: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class VocabData(VectorData):
+    """
+    Data that come from a controlled vocabulary of text values. A data value of i corresponds to the i-th element in the 'vocabulary' array attribute.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(...)
+    vocabulary: NDArray[Shape["* null"], str] = Field(
+        ...,
+        description="""The available items in the controlled vocabulary.""",
+        json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "null"}]}}},
+    )
+    description: str = Field(..., description="""Description of what these vectors represent.""")
+    value: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+    """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size.
To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + colnames: List[str] = Field( + ..., + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +VocabData.model_rebuild() +DynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py new file mode 100644 index 0000000..a2dcc70 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py @@ -0,0 +1,86 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_3_0.hdmf_common_resources import ( + ExternalResources, + ExternalResourcesKeys, + ExternalResourcesResources, + ExternalResourcesObjects, + ExternalResourcesObjectKeys, +) +from ...hdmf_common.v1_3_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_3_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_3_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + VocabData, + DynamicTable, +) + +metamodel_version = "None" +version = "1.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) 
+ object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common/", + "description": "Common data structures provided by HDMF", + "id": "hdmf-common", + "imports": [ + "hdmf-common.base", + "hdmf-common.table", + "hdmf-common.sparse", + "hdmf-common.resources", + "hdmf-common.nwb.language", + ], + "name": "hdmf-common", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py new file mode 100644 index 0000000..3adb8b8 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py @@ -0,0 +1,104 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.4.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.base/", + "id": "hdmf-common.base", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.base", + } +) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. 
Base type for all data and metadata containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + value: Optional[List[Container]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} + ) + name: str = Field(...) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() +SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py new file mode 100644 index 0000000..f304f3a --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py @@ -0,0 +1,110 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_4_0.hdmf_common_base import Container +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.4.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.sparse/", + "id": "hdmf-common.sparse", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.sparse", + } +) + + +class CSRMatrix(Container): + """ + A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.sparse", "tree_root": True} + ) + + name: str = Field(...) 
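The `indices`/`indptr`/`data` layout described in the CSRMatrix docstring above is easiest to see with a tiny worked example (illustrative, plain numpy; not part of the generated file). For the 2x3 matrix [[10, 0, 20], [0, 30, 0]]:

    import numpy as np

    data = np.array([10, 20, 30])   # non-zero values, row-major
    indices = np.array([0, 2, 1])   # column index of each stored value
    indptr = np.array([0, 2, 3])    # row i spans data[indptr[i]:indptr[i+1]]

    row = 0
    cols = indices[indptr[row]:indptr[row + 1]]  # array([0, 2])
    vals = data[indptr[row]:indptr[row + 1]]     # array([10, 20])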
+ shape: List[int] = Field( + ..., description="""The shape (number of rows, number of columns) of this sparse matrix.""" + ) + indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field( + ..., + description="""The column indices.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} + }, + ) + indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field( + ..., + description="""The row index pointer.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} + }, + ) + data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") + + +class CSRMatrixData(ConfiguredBaseModel): + """ + The non-zero values in the matrix. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +CSRMatrix.model_rebuild() +CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py new file mode 100644 index 0000000..dbc86c7 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -0,0 +1,423 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +import numpy as np +from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container +from pandas import DataFrame, Series +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from numpydantic import NDArray, Shape +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator + +metamodel_version = "None" +version = "1.4.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.value[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + if self._index: + # Following hdmf, VectorIndex is 
the thing that knows how to do the slicing
+            self._index[key] = value
+        else:
+            self.value[key] = value
+
+
+class VectorIndexMixin(BaseModel):
+    """
+    Mixin class to give VectorIndex indexing abilities
+    """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+    value: Optional[NDArray] = None
+    target: Optional["VectorData"] = None
+
+    def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
+        """
+        Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+        """
+
+        start = 0 if arg == 0 else self.value[arg - 1]
+        end = self.value[arg]
+        # the target's data lives in its `value` field; VectorData has no `array` attribute
+        return self.target.value[slice(start, end)]
+
+    def __getitem__(self, item: Union[int, slice]) -> Any:
+        if self.target is None:
+            return self.value[item]
+        elif type(self.target).__name__ == "VectorData":
+            if isinstance(item, int):
+                return self._getitem_helper(item)
+            else:
+                idx = range(*item.indices(len(self.value)))
+                return [self._getitem_helper(i) for i in idx]
+        else:
+            raise NotImplementedError("DynamicTableRange not supported yet")
+
+    def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+        # unlike VectorDataMixin, a VectorIndex has no `_index` of its own:
+        # plain writes go to our own value array, and writing through the
+        # ragged index is not supported yet (mirroring __getitem__ above)
+        if self.target is None:
+            self.value[key] = value
+        else:
+            raise NotImplementedError("Setting values through a VectorIndex is not supported yet")
+
+
+class DynamicTableMixin(BaseModel):
+    """
+    Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+    Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+    but simplifying along the way :)
+    """
+
+    model_config = ConfigDict(extra="allow")
+    __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+        "name",
+        "colnames",
+        "description",
+    )
+
+    # overridden by subclass but implemented here for testing and typechecking purposes :)
+    colnames: List[str] = Field(default_factory=list)
+
+    @property
+    def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+    @property
+    def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]:
+        return [getattr(self, k) for k in self.colnames]
+
+    @overload
+    def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+    @overload
+    def __getitem__(self, item: int) -> DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+        DataFrame,
+        list,
+        "NDArray",
+        "VectorDataMixin",
+    ]: ...
+
+    @overload
+    def __getitem__(self, item: slice) -> DataFrame: ...
+
+    def __getitem__(
+        self,
+        item: Union[
+            str,
+            int,
+            slice,
+            Tuple[int, Union[int, str]],
+            Tuple[Union[int, slice], ...],
+        ],
+    ) -> Any:
+        """
+        Get an item from the table
+
+        If item is...
+
+        - ``str`` : get the column with this name
+        - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname')
+          gets the 0th row from ``colname``
+        - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame` + """ + if isinstance(item, str): + return self._columns[item] + if isinstance(item, (int, slice)): + return DataFrame.from_dict(self._slice_range(item)) + elif isinstance(item, tuple): + if len(item) != 2: + raise ValueError( + "DynamicTables are 2-dimensional, can't index with more than 2 indices like" + f" {item}" + ) + + # all other cases are tuples of (rows, cols) + rows, cols = item + if isinstance(cols, (int, slice)): + cols = self.colnames[cols] + + if isinstance(rows, int) and isinstance(cols, str): + # single scalar value + return self._columns[cols][rows] + + data = self._slice_range(rows, cols) + return DataFrame.from_dict(data) + else: + raise ValueError(f"Unsure how to get item with key {item}") + + def _slice_range( + self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + if cols is None: + cols = self.colnames + elif isinstance(cols, str): + cols = [cols] + data = {} + for k in cols: + val = self._columns[k][rows] + if isinstance(val, BaseModel): + # special case where pandas will unpack a pydantic model + # into {n_fields} rows, rather than keeping it in a dict + val = Series([val]) + data[k] = val + return data + + def __setitem__(self, key: str, value: Any) -> None: + raise NotImplementedError("TODO") + + def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): + """ + Add a column, appending it to ``colnames`` + """ + # don't use this while building the model + if not getattr(self, "__pydantic_complete__", False): + return super().__setattr__(key, value) + + if key not in self.model_fields_set and not key.endswith("_index"): + self.colnames.append(key) + + return super().__setattr__(key, value) + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]) -> None: + """ + Construct colnames from arguments. 
+ + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "colnames" not in model: + colnames = [ + k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + ] + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + ] + model["colnames"].extend(colnames) + return model + + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._columns.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.table/", + "id": "hdmf-common.table", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.table", + } +) + + +class VectorData(VectorDataMixin): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorIndexMixin): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) 
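Taken together, the mixins above give the generated classes dataframe-like access. A usage sketch (illustrative; assumes the `target.value` and `__setitem__` fixes noted above, the import path is inferred from the file paths in this diff, and `Units` is a hypothetical subclass, since NWB's real ragged tables live in the core namespace):

    import numpy as np
    from nwb_linkml.models.pydantic.hdmf_common.v1_4_0.hdmf_common_table import (
        DynamicTable,
        VectorData,
        VectorIndex,
    )

    class Units(DynamicTable):
        spikes: VectorData
        spikes_index: VectorIndex

    spikes = VectorData(
        name="spikes",
        description="spike times, all rows concatenated",
        value=np.array([0.1, 0.2, 0.3, 0.4, 0.5]),
    )
    tbl = Units(
        name="units",
        description="demo",
        id=np.arange(2),
        spikes=spikes,
        spikes_index=VectorIndex(
            name="spikes_index",
            description="ragged row bounds",
            target=spikes,
            value=np.array([2, 5]),  # row 0 -> value[0:2], row 1 -> value[2:5]
        ),
    )

    tbl["spikes"]     # the VectorData column itself
    tbl[0, "spikes"]  # row 0's ragged cell: array([0.1, 0.2])
    tbl[0:2]          # a two-row pandas DataFrame of "id" and "spikes"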
+ target: VectorData = Field( + ..., description="""Reference to the target dataset that this index applies to.""" + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field( + "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} + ) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + table: DynamicTable = Field( + ..., description="""Reference to the DynamicTable object that this region applies to.""" + ) + description: str = Field( + ..., description="""Description of what this table region points to.""" + ) + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class DynamicTable(DynamicTableMixin): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. 
For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + colnames: List[str] = Field( + ..., + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +DynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py new file mode 100644 index 0000000..db59f28 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py @@ -0,0 +1,77 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_4_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, +) + +metamodel_version = "None" +version = "1.4.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common/", + "description": "Common data structures provided by HDMF", + "id": "hdmf-common", + "imports": [ + "hdmf-common.base", + "hdmf-common.table", + "hdmf-common.sparse", + "hdmf-common.nwb.language", + ], + "name": "hdmf-common", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git 
a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 294f168..f7edfe3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -6,7 +6,7 @@ import re import sys import numpy as np from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container -from pandas import DataFrame +from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from numpydantic import NDArray, Shape @@ -198,6 +198,11 @@ class DynamicTableMixin(BaseModel): rows, cols = item if isinstance(cols, (int, slice)): cols = self.colnames[cols] + + if isinstance(rows, int) and isinstance(cols, str): + # single scalar value + return self._columns[cols][rows] + data = self._slice_range(rows, cols) return DataFrame.from_dict(data) else: @@ -210,8 +215,14 @@ class DynamicTableMixin(BaseModel): cols = self.colnames elif isinstance(cols, str): cols = [cols] - - data = {k: self._columns[k][rows] for k in cols} + data = {} + for k in cols: + val = self._columns[k][rows] + if isinstance(val, BaseModel): + # special case where pandas will unpack a pydantic model + # into {n_fields} rows, rather than keeping it in a dict + val = Series([val]) + data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py new file mode 100644 index 0000000..e360db2 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py @@ -0,0 +1,104 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.5.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": 
{"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.base/", + "id": "hdmf-common.base", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.base", + } +) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + value: Optional[List[Container]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} + ) + name: str = Field(...) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() +SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py new file mode 100644 index 0000000..30f0c4b --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py @@ -0,0 +1,110 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_5_1.hdmf_common_base import Container +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.5.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.sparse/", + "id": "hdmf-common.sparse", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.sparse", + } +) + + +class CSRMatrix(Container): + """ + A compressed sparse row matrix. 
Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.sparse", "tree_root": True} + ) + + name: str = Field(...) + shape: List[int] = Field( + ..., description="""The shape (number of rows, number of columns) of this sparse matrix.""" + ) + indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field( + ..., + description="""The column indices.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} + }, + ) + indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field( + ..., + description="""The row index pointer.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} + }, + ) + data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") + + +class CSRMatrixData(ConfiguredBaseModel): + """ + The non-zero values in the matrix. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +CSRMatrix.model_rebuild() +CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py new file mode 100644 index 0000000..5d3a3bf --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -0,0 +1,452 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +import numpy as np +from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container +from pandas import DataFrame, Series +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.5.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, 
int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            return self._index[item]
+        else:
+            return self.value[item]
+
+    def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            self._index[key] = value
+        else:
+            self.value[key] = value
+
+
+class VectorIndexMixin(BaseModel):
+    """
+    Mixin class to give VectorIndex indexing abilities
+    """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+    value: Optional[NDArray] = None
+    target: Optional["VectorData"] = None
+
+    def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
+        """
+        Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+        """
+
+        start = 0 if arg == 0 else self.value[arg - 1]
+        end = self.value[arg]
+        # the target's data lives in its `value` field; VectorData has no `array` attribute
+        return self.target.value[slice(start, end)]
+
+    def __getitem__(self, item: Union[int, slice]) -> Any:
+        if self.target is None:
+            return self.value[item]
+        elif type(self.target).__name__ == "VectorData":
+            if isinstance(item, int):
+                return self._getitem_helper(item)
+            else:
+                idx = range(*item.indices(len(self.value)))
+                return [self._getitem_helper(i) for i in idx]
+        else:
+            raise NotImplementedError("DynamicTableRange not supported yet")
+
+    def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+        # unlike VectorDataMixin, a VectorIndex has no `_index` of its own:
+        # plain writes go to our own value array, and writing through the
+        # ragged index is not supported yet (mirroring __getitem__ above)
+        if self.target is None:
+            self.value[key] = value
+        else:
+            raise NotImplementedError("Setting values through a VectorIndex is not supported yet")
+
+
+class DynamicTableMixin(BaseModel):
+    """
+    Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+    Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+    but simplifying along the way :)
+    """
+
+    model_config = ConfigDict(extra="allow")
+    __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+        "name",
+        "colnames",
+        "description",
+    )
+
+    # overridden by subclass but implemented here for testing and typechecking purposes :)
+    colnames: List[str] = Field(default_factory=list)
+
+    @property
+    def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+    @property
+    def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]:
+        return [getattr(self, k) for k in self.colnames]
+
+    @overload
+    def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+    @overload
+    def __getitem__(self, item: int) -> DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+        DataFrame,
+        list,
+        "NDArray",
+        "VectorDataMixin",
+    ]: ...
+
+    @overload
+    def __getitem__(self, item: slice) -> DataFrame: ...
+
+    def __getitem__(
+        self,
+        item: Union[
+            str,
+            int,
+            slice,
+            Tuple[int, Union[int, str]],
+            Tuple[Union[int, slice], ...],
+        ],
+    ) -> Any:
+        """
+        Get an item from the table
+
+        If item is...
+
+        - ``str`` : get the column with this name
+        - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname')
+          gets the 0th row from ``colname``
+        - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+ returns as a :class:`pandas.DataFrame` + """ + if isinstance(item, str): + return self._columns[item] + if isinstance(item, (int, slice)): + return DataFrame.from_dict(self._slice_range(item)) + elif isinstance(item, tuple): + if len(item) != 2: + raise ValueError( + "DynamicTables are 2-dimensional, can't index with more than 2 indices like" + f" {item}" + ) + + # all other cases are tuples of (rows, cols) + rows, cols = item + if isinstance(cols, (int, slice)): + cols = self.colnames[cols] + + if isinstance(rows, int) and isinstance(cols, str): + # single scalar value + return self._columns[cols][rows] + + data = self._slice_range(rows, cols) + return DataFrame.from_dict(data) + else: + raise ValueError(f"Unsure how to get item with key {item}") + + def _slice_range( + self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: + if cols is None: + cols = self.colnames + elif isinstance(cols, str): + cols = [cols] + data = {} + for k in cols: + val = self._columns[k][rows] + if isinstance(val, BaseModel): + # special case where pandas will unpack a pydantic model + # into {n_fields} rows, rather than keeping it in a dict + val = Series([val]) + data[k] = val + return data + + def __setitem__(self, key: str, value: Any) -> None: + raise NotImplementedError("TODO") + + def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): + """ + Add a column, appending it to ``colnames`` + """ + # don't use this while building the model + if not getattr(self, "__pydantic_complete__", False): + return super().__setattr__(key, value) + + if key not in self.model_fields_set and not key.endswith("_index"): + self.colnames.append(key) + + return super().__setattr__(key, value) + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]) -> None: + """ + Construct colnames from arguments. 
+ + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "colnames" not in model: + colnames = [ + k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + ] + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + ] + model["colnames"].extend(colnames) + return model + + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._columns.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.table/", + "id": "hdmf-common.table", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.table", + } +) + + +class VectorData(VectorDataMixin): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorIndexMixin): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) 
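The two validators above do the bookkeeping: `create_colnames` infers column order from construction order, and `resolve_targets` wires a column named `<col>_index` to its target. A sketch (illustrative; `MyTable` is hypothetical and the import path is inferred from this diff's file layout):

    import numpy as np
    from nwb_linkml.models.pydantic.hdmf_common.v1_5_1.hdmf_common_table import (
        DynamicTable,
        VectorData,
    )

    class MyTable(DynamicTable):
        a: VectorData
        b: VectorData

    t = MyTable(
        name="t",
        description="demo",
        id=np.arange(3),
        b=VectorData(name="b", description="d", value=np.zeros(3)),
        a=VectorData(name="a", description="d", value=np.ones(3)),
    )
    t.colnames  # ["id", "b", "a"]: keyword order, minus NON_COLUMN_FIELDS

    # __setattr__ registers ad-hoc columns added after construction
    t.c = VectorData(name="c", description="d", value=np.full(3, 2.0))
    t.colnames  # ["id", "b", "a", "c"]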
+ target: VectorData = Field( + ..., description="""Reference to the target dataset that this index applies to.""" + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field( + "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}} + ) + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + table: DynamicTable = Field( + ..., description="""Reference to the DynamicTable object that this region applies to.""" + ) + description: str = Field( + ..., description="""Description of what this table region points to.""" + ) + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class DynamicTable(DynamicTableMixin): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. 
For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + colnames: List[str] = Field( + ..., + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + value: Optional[List[DynamicTable]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} + ) + name: str = Field(...) + colnames: List[str] = Field( + ..., + description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", + ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +DynamicTable.model_rebuild() +AlignedDynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py new file mode 100644 index 0000000..836122e --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py @@ -0,0 +1,78 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_5_1.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_5_1.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + +metamodel_version = "None" +version = "1.5.1" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common/", + "description": "Common data structures provided by HDMF", + "id": "hdmf-common", + "imports": [ + "hdmf-common.base", + "hdmf-common.table", + "hdmf-common.sparse", + "hdmf-common.nwb.language", + ], + "name": "hdmf-common", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/__init__.py @@ -0,0 +1 @@ + diff --git 
a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py new file mode 100644 index 0000000..49293d1 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py @@ -0,0 +1,104 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.6.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.base/", + "id": "hdmf-common.base", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.base", + } +) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + value: Optional[List[Container]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} + ) + name: str = Field(...) 
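The base module stays thin across versions: `Data` and `Container` are just named models, and `SimpleMultiContainer` holds a list of `Container`s. A quick construction sketch (illustrative; import path inferred from the file paths in this diff):

    from nwb_linkml.models.pydantic.hdmf_common.v1_6_0.hdmf_common_base import (
        Container,
        SimpleMultiContainer,
    )

    holder = SimpleMultiContainer(
        name="holder",
        value=[Container(name="a"), Container(name="b")],
    )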
+ + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() +SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py new file mode 100644 index 0000000..ce6c0ed --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py @@ -0,0 +1,110 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_6_0.hdmf_common_base import Container +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.6.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.sparse/", + "id": "hdmf-common.sparse", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.sparse", + } +) + + +class CSRMatrix(Container): + """ + A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.sparse", "tree_root": True} + ) + + name: str = Field(...) + shape: List[int] = Field( + ..., description="""The shape (number of rows, number of columns) of this sparse matrix.""" + ) + indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field( + ..., + description="""The column indices.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} + }, + ) + indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field( + ..., + description="""The row index pointer.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} + }, + ) + data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") + + +class CSRMatrixData(ConfiguredBaseModel): + """ + The non-zero values in the matrix. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"}) + + name: Literal["data"] = Field( + "data", + json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}}, + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +CSRMatrix.model_rebuild() +CSRMatrixData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py new file mode 100644 index 0000000..e6f9d8c --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -0,0 +1,452 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +import numpy as np +from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container +from pandas import DataFrame, Series +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.6.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.value[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.value[key] = value + + +class VectorIndexMixin(BaseModel): + """ + Mixin class to give VectorIndex indexing abilities + """ + + # redefined in `VectorData`, but included here for testing and type checking + value: Optional[NDArray] = None + target: Optional["VectorData"] = None + + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: + """ + Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` + """ + + start = 0 if arg == 0 else self.value[arg - 1] + end = self.value[arg] + return self.target.array[slice(start, end)] + + def __getitem__(self, item: Union[int, slice]) -> Any: + if self.target is None: + return self.value[item] + elif type(self.target).__name__ == 
"VectorData": + if isinstance(item, int): + return self._getitem_helper(item) + else: + idx = range(*item.indices(len(self.value))) + return [self._getitem_helper(i) for i in idx] + else: + raise NotImplementedError("DynamicTableRange not supported yet") + + def __setitem__(self, key: Union[int, slice], value: Any) -> None: + if self._index: + # VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.value[key] = value + + +class DynamicTableMixin(BaseModel): + """ + Mixin to make DynamicTable subclasses behave like tables/dataframes + + Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable` + but simplifying along the way :) + """ + + model_config = ConfigDict(extra="allow") + __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] + NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "name", + "colnames", + "description", + ) + + # overridden by subclass but implemented here for testing and typechecking purposes :) + colnames: List[str] = Field(default_factory=list) + + @property + def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: + return {k: getattr(self, k) for i, k in enumerate(self.colnames)} + + @property + def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: + return [getattr(self, k) for i, k in enumerate(self.colnames)] + + @overload + def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... + + @overload + def __getitem__(self, item: int) -> DataFrame: ... + + @overload + def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... + + @overload + def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ + DataFrame, + list, + "NDArray", + "VectorDataMixin", + ]: ... + + @overload + def __getitem__(self, item: slice) -> DataFrame: ... + + def __getitem__( + self, + item: Union[ + str, + int, + slice, + Tuple[int, Union[int, str]], + Tuple[Union[int, slice], ...], + ], + ) -> Any: + """ + Get an item from the table + + If item is... + + - ``str`` : get the column with this name + - ``int`` : get the row at this index + - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column + - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname') + gets the 0th row from ``colname`` + - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns. 
+          returns as a :class:`pandas.DataFrame`
+        """
+        if isinstance(item, str):
+            return self._columns[item]
+        elif isinstance(item, (int, slice)):
+            return DataFrame.from_dict(self._slice_range(item))
+        elif isinstance(item, tuple):
+            if len(item) != 2:
+                raise ValueError(
+                    "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+                    f" {item}"
+                )
+
+            # all other cases are tuples of (rows, cols)
+            rows, cols = item
+            if isinstance(cols, (int, slice)):
+                cols = self.colnames[cols]
+
+            if isinstance(rows, int) and isinstance(cols, str):
+                # single scalar value
+                return self._columns[cols][rows]
+
+            data = self._slice_range(rows, cols)
+            return DataFrame.from_dict(data)
+        else:
+            raise ValueError(f"Unsure how to get item with key {item}")
+
+    def _slice_range(
+        self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
+    ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        if cols is None:
+            cols = self.colnames
+        elif isinstance(cols, str):
+            cols = [cols]
+        data = {}
+        for k in cols:
+            val = self._columns[k][rows]
+            if isinstance(val, BaseModel):
+                # special case where pandas will unpack a pydantic model
+                # into {n_fields} rows, rather than keeping it in a dict
+                val = Series([val])
+            data[k] = val
+        return data
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        raise NotImplementedError("TODO")
+
+    def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+        """
+        Add a column, appending it to ``colnames``
+        """
+        # don't use this while building the model
+        if not getattr(self, "__pydantic_complete__", False):
+            return super().__setattr__(key, value)
+
+        if key not in self.model_fields_set and not key.endswith("_index"):
+            self.colnames.append(key)
+
+        return super().__setattr__(key, value)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_colnames(cls, model: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Construct colnames from arguments.
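+        e.g. a hypothetical ``MyTable(name="t", description="d", col1=[1], col2=[2])``
+        yields ``colnames == ["col1", "col2"]``.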
+ + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "colnames" not in model: + colnames = [ + k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + ] + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + ] + model["colnames"].extend(colnames) + return model + + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._columns.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.table/", + "id": "hdmf-common.table", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.table", + } +) + + +class VectorData(VectorDataMixin): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorIndexMixin): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) 
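+    # ``target`` is the column being indexed; ``DynamicTableMixin.resolve_targets``
+    # also sets that column's ``_index`` back to this VectorIndex.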
+    target: VectorData = Field(
+        ..., description="""Reference to the target dataset that this index applies to."""
+    )
+    description: str = Field(..., description="""Description of what these vectors represent.""")
+    value: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class ElementIdentifiers(Data):
+    """
+    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(
+        "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
+    )
+
+
+class DynamicTableRegion(VectorData):
+    """
+    DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(...)
+    table: DynamicTable = Field(
+        ..., description="""Reference to the DynamicTable object that this region applies to."""
+    )
+    description: str = Field(
+        ..., description="""Description of what this table region points to."""
+    )
+    value: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+    """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account.
For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + colnames: List[str] = Field( + ..., + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + value: Optional[List[DynamicTable]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} + ) + name: str = Field(...) + colnames: List[str] = Field( + ..., + description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", + ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +DynamicTable.model_rebuild() +AlignedDynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py new file mode 100644 index 0000000..1dc832f --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py @@ -0,0 +1,78 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_6_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_6_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + +metamodel_version = "None" +version = "1.6.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common/", + "description": "Common data structures provided by HDMF", + "id": "hdmf-common", + "imports": [ + "hdmf-common.base", + "hdmf-common.table", + "hdmf-common.sparse", + "hdmf-common.nwb.language", + ], + "name": "hdmf-common", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/__init__.py @@ -0,0 +1 @@ + diff --git 
a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py new file mode 100644 index 0000000..51a093f --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py @@ -0,0 +1,104 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np + +metamodel_version = "None" +version = "1.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.base/", + "id": "hdmf-common.base", + "imports": ["hdmf-common.nwb.language"], + "name": "hdmf-common.base", + } +) + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + name: str = Field(...) + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.base", "tree_root": True} + ) + + value: Optional[List[Container]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "Container"}]}} + ) + name: str = Field(...) 
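+
+# Example (hypothetical names, not generated output): ``name`` is the only
+# required field on these base classes, so direct construction looks like::
+#
+#     a = Container(name="a")
+#     multi = SimpleMultiContainer(name="holder", value=[a, Container(name="b")])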
+ + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() +SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py new file mode 100644 index 0000000..f59d4a7 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py @@ -0,0 +1,110 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_7_0.hdmf_common_base import Container +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "1.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.sparse/", + "id": "hdmf-common.sparse", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.sparse", + } +) + + +class CSRMatrix(Container): + """ + A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.sparse", "tree_root": True} + ) + + name: str = Field(...) + shape: List[int] = Field( + ..., description="""The shape (number of rows, number of columns) of this sparse matrix.""" + ) + indices: NDArray[Shape["* number_of_non_zero_values"], int] = Field( + ..., + description="""The column indices.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_non_zero_values"}]}} + }, + ) + indptr: NDArray[Shape["* number_of_rows_in_the_matrix_1"], int] = Field( + ..., + description="""The row index pointer.""", + json_schema_extra={ + "linkml_meta": {"array": {"dimensions": [{"alias": "number_of_rows_in_the_matrix_1"}]}} + }, + ) + data: CSRMatrixData = Field(..., description="""The non-zero values in the matrix.""") + + +class CSRMatrixData(ConfiguredBaseModel): + """ + The non-zero values in the matrix. 
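+    Row i's values are stored at ``data[indptr[i]:indptr[i+1]]``, aligned with the
+    column indices in ``indices``.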
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-common.sparse"})
+
+    name: Literal["data"] = Field(
+        "data",
+        json_schema_extra={"linkml_meta": {"equals_string": "data", "ifabsent": "string(data)"}},
+    )
+
+
+# Model rebuild
+# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model
+CSRMatrix.model_rebuild()
+CSRMatrixData.model_rebuild()
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py
new file mode 100644
index 0000000..1875be8
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py
@@ -0,0 +1,452 @@
+from __future__ import annotations
+from datetime import datetime, date
+from decimal import Decimal
+from enum import Enum
+import re
+import sys
+import numpy as np
+from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container
+from pandas import DataFrame, Series
+from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple
+from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator
+from numpydantic import NDArray, Shape
+
+metamodel_version = "None"
+version = "1.7.0"
+
+
+class ConfiguredBaseModel(BaseModel):
+    model_config = ConfigDict(
+        validate_assignment=True,
+        validate_default=True,
+        extra="forbid",
+        arbitrary_types_allowed=True,
+        use_enum_values=True,
+        strict=False,
+    )
+    hdf5_path: Optional[str] = Field(
+        None, description="The absolute path that this object is stored in an NWB file"
+    )
+    object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+
+class LinkMLMeta(RootModel):
+    root: Dict[str, Any] = {}
+    model_config = ConfigDict(frozen=True)
+
+    def __getattr__(self, key: str):
+        return getattr(self.root, key)
+
+    def __getitem__(self, key: str):
+        return self.root[key]
+
+    def __setitem__(self, key: str, value):
+        self.root[key] = value
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.root
+
+
+NUMPYDANTIC_VERSION = "1.2.1"
+
+
+class VectorDataMixin(BaseModel):
+    """
+    Mixin class to give VectorData indexing abilities
+    """
+
+    _index: Optional["VectorIndex"] = None
+
+    # redefined in `VectorData`, but included here for testing and type checking
+    value: Optional[NDArray] = None
+
+    def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            return self._index[item]
+        else:
+            return self.value[item]
+
+    def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            self._index[key] = value
+        else:
+            self.value[key] = value
+
+
+class VectorIndexMixin(BaseModel):
+    """
+    Mixin class to give VectorIndex indexing abilities
+    """
+
+    # redefined in `VectorIndex`, but included here for testing and type checking
+    value: Optional[NDArray] = None
+    target: Optional["VectorData"] = None
+
+    def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
+        """
+        Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
+        """
+
+        start = 0 if arg == 0 else self.value[arg - 1]
+        end = self.value[arg]
+        # the target VectorData stores its data in ``value``
+        return self.target.value[slice(start, end)]
+
+    def __getitem__(self, item: Union[int, slice]) -> Any:
+        if self.target is None:
+            return self.value[item]
+        elif type(self.target).__name__ == "VectorData":
+            if isinstance(item, int):
+                return self._getitem_helper(item)
+            else:
+                idx = range(*item.indices(len(self.value)))
+                return [self._getitem_helper(i) for i in idx]
+        else:
+            raise NotImplementedError("DynamicTableRange not supported yet")
+
+    def __setitem__(self, key: Union[int, slice], value: Any) -> None:
+        if self.target is None:
+            self.value[key] = value
+        else:
+            # writing through the index would need to remap the target's ragged
+            # slices, which is not implemented yet
+            raise NotImplementedError("Setting values through a VectorIndex is not supported yet")
+
+
+class DynamicTableMixin(BaseModel):
+    """
+    Mixin to make DynamicTable subclasses behave like tables/dataframes
+
+    Mimicking some of the behavior from :class:`hdmf.common.table.DynamicTable`
+    but simplifying along the way :)
+    """
+
+    model_config = ConfigDict(extra="allow")
+    __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]]
+    NON_COLUMN_FIELDS: ClassVar[Tuple[str, ...]] = (
+        "name",
+        "colnames",
+        "description",
+    )
+
+    # overridden by subclass but implemented here for testing and typechecking purposes :)
+    colnames: List[str] = Field(default_factory=list)
+
+    @property
+    def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]:
+        return {k: getattr(self, k) for k in self.colnames}
+
+    @property
+    def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]:
+        return [getattr(self, k) for k in self.colnames]
+
+    @overload
+    def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...
+
+    @overload
+    def __getitem__(self, item: int) -> DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...
+
+    @overload
+    def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
+        DataFrame,
+        list,
+        "NDArray",
+        "VectorDataMixin",
+    ]: ...
+
+    @overload
+    def __getitem__(self, item: slice) -> DataFrame: ...
+
+    def __getitem__(
+        self,
+        item: Union[
+            str,
+            int,
+            slice,
+            Tuple[int, Union[int, str]],
+            Tuple[Union[int, slice], ...],
+        ],
+    ) -> Any:
+        """
+        Get an item from the table
+
+        If item is...
+
+        - ``str`` : get the column with this name
+        - ``int`` : get the row at this index
+        - ``tuple[int, int]`` : get a specific cell value eg. (0,1) gets the 0th row and 1st column
+        - ``tuple[int, str]`` : get a specific cell value eg. (0, 'colname')
+          gets the 0th row from ``colname``
+        - ``tuple[int | slice, int | slice]`` : get a range of cells from a range of columns.
+          returns as a :class:`pandas.DataFrame`
+        """
+        if isinstance(item, str):
+            return self._columns[item]
+        elif isinstance(item, (int, slice)):
+            return DataFrame.from_dict(self._slice_range(item))
+        elif isinstance(item, tuple):
+            if len(item) != 2:
+                raise ValueError(
+                    "DynamicTables are 2-dimensional, can't index with more than 2 indices like"
+                    f" {item}"
+                )
+
+            # all other cases are tuples of (rows, cols)
+            rows, cols = item
+            if isinstance(cols, (int, slice)):
+                cols = self.colnames[cols]
+
+            if isinstance(rows, int) and isinstance(cols, str):
+                # single scalar value
+                return self._columns[cols][rows]
+
+            data = self._slice_range(rows, cols)
+            return DataFrame.from_dict(data)
+        else:
+            raise ValueError(f"Unsure how to get item with key {item}")
+
+    def _slice_range(
+        self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None
+    ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
+        if cols is None:
+            cols = self.colnames
+        elif isinstance(cols, str):
+            cols = [cols]
+        data = {}
+        for k in cols:
+            val = self._columns[k][rows]
+            if isinstance(val, BaseModel):
+                # special case where pandas will unpack a pydantic model
+                # into {n_fields} rows, rather than keeping it in a dict
+                val = Series([val])
+            data[k] = val
+        return data
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        raise NotImplementedError("TODO")
+
+    def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]):
+        """
+        Add a column, appending it to ``colnames``
+        """
+        # don't use this while building the model
+        if not getattr(self, "__pydantic_complete__", False):
+            return super().__setattr__(key, value)
+
+        if key not in self.model_fields_set and not key.endswith("_index"):
+            self.colnames.append(key)
+
+        return super().__setattr__(key, value)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_colnames(cls, model: Dict[str, Any]) -> Dict[str, Any]:
+        """
+        Construct colnames from arguments.
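+        e.g. a hypothetical ``MyTable(name="t", description="d", col1=[1], col2=[2])``
+        yields ``colnames == ["col1", "col2"]``.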
+ + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "colnames" not in model: + colnames = [ + k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") + ] + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + ] + model["colnames"].extend(colnames) + return model + + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._columns.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common.table/", + "id": "hdmf-common.table", + "imports": ["hdmf-common.base", "hdmf-common.nwb.language"], + "name": "hdmf-common.table", + } +) + + +class VectorData(VectorDataMixin): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorIndexMixin): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) 
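+    # ``target`` is the column being indexed; ``DynamicTableMixin.resolve_targets``
+    # also sets that column's ``_index`` back to this VectorIndex.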
+    target: VectorData = Field(
+        ..., description="""Reference to the target dataset that this index applies to."""
+    )
+    description: str = Field(..., description="""Description of what these vectors represent.""")
+    value: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class ElementIdentifiers(Data):
+    """
+    A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(
+        "element_id", json_schema_extra={"linkml_meta": {"ifabsent": "string(element_id)"}}
+    )
+
+
+class DynamicTableRegion(VectorData):
+    """
+    DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta(
+        {"from_schema": "hdmf-common.table", "tree_root": True}
+    )
+
+    name: str = Field(...)
+    table: DynamicTable = Field(
+        ..., description="""Reference to the DynamicTable object that this region applies to."""
+    )
+    description: str = Field(
+        ..., description="""Description of what this table region points to."""
+    )
+    value: Optional[
+        Union[
+            NDArray[Shape["* dim0"], Any],
+            NDArray[Shape["* dim0, * dim1"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2"], Any],
+            NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any],
+        ]
+    ] = Field(None)
+
+
+class DynamicTable(DynamicTableMixin):
+    """
+    A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account.
For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + name: str = Field(...) + colnames: List[str] = Field( + ..., + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-common.table", "tree_root": True} + ) + + value: Optional[List[DynamicTable]] = Field( + None, json_schema_extra={"linkml_meta": {"any_of": [{"range": "DynamicTable"}]}} + ) + name: str = Field(...) + colnames: List[str] = Field( + ..., + description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", + ) + description: str = Field(..., description="""Description of what is in this dynamic table.""") + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, + ) + vector_data: Optional[List[VectorData]] = Field( + None, description="""Vector columns, including index columns, of this dynamic table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +DynamicTable.model_rebuild() +AlignedDynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py new file mode 100644 index 0000000..7d70e39 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py @@ -0,0 +1,78 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_7_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_7_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + +metamodel_version = "None" +version = "1.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-common"}, + }, + "default_prefix": "hdmf-common/", + "description": "Common data structures provided by HDMF", + "id": "hdmf-common", + "imports": [ + "hdmf-common.base", + "hdmf-common.table", + "hdmf-common.sparse", + "hdmf-common.nwb.language", + ], + "name": "hdmf-common", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index 39b21f7..ef6ba01 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ 
b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -1,9 +1,14 @@ from __future__ import annotations - -from ...hdmf_common.v1_8_0.hdmf_common_base import Data +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +import numpy as np +from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Dict, Optional, Union, overload, Tuple -from pydantic import BaseModel, ConfigDict, Field, RootModel, model_validator +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from numpydantic import NDArray, Shape metamodel_version = "None" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py index 0303e13..913bcfc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py @@ -7,7 +7,7 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData +from ...hdmf_common.v1_4_0.hdmf_common_table import VectorData from numpydantic import NDArray, Shape metamodel_version = "None" @@ -55,7 +55,7 @@ linkml_meta = LinkMLMeta( }, "default_prefix": "hdmf-experimental.experimental/", "id": "hdmf-experimental.experimental", - "imports": ["../../hdmf_common/v1_5_0/namespace", "hdmf-experimental.nwb.language"], + "imports": ["../../hdmf_common/v1_4_0/namespace", "hdmf-experimental.nwb.language"], "name": "hdmf-experimental.experimental", } ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py index 366822a..6b84d69 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py @@ -7,7 +7,7 @@ import sys from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np -from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data +from ...hdmf_common.v1_4_0.hdmf_common_base import Container, Data metamodel_version = "None" version = "0.1.0" @@ -53,7 +53,7 @@ linkml_meta = LinkMLMeta( }, "default_prefix": "hdmf-experimental.resources/", "id": "hdmf-experimental.resources", - "imports": ["../../hdmf_common/v1_5_0/namespace", "hdmf-experimental.nwb.language"], + "imports": ["../../hdmf_common/v1_4_0/namespace", "hdmf-experimental.nwb.language"], "name": "hdmf-experimental.resources", } ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py index 69ffad1..7ea10f7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py +++ 
b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py @@ -15,15 +15,14 @@ from ...hdmf_experimental.v0_1_0.hdmf_experimental_resources import ( ExternalResourcesObjects, ExternalResourcesObjectKeys, ) -from ...hdmf_common.v1_5_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData -from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container, SimpleMultiContainer -from ...hdmf_common.v1_5_0.hdmf_common_table import ( +from ...hdmf_common.v1_4_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_4_0.hdmf_common_table import ( VectorData, VectorIndex, ElementIdentifiers, DynamicTableRegion, DynamicTable, - AlignedDynamicTable, ) from ...hdmf_experimental.v0_1_0.hdmf_experimental_experimental import EnumData diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py new file mode 100644 index 0000000..63f582a --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py @@ -0,0 +1,91 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_5_1.hdmf_common_table import VectorData +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "0.2.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental.experimental/", + "id": "hdmf-experimental.experimental", + "imports": ["../../hdmf_common/v1_5_1/namespace", "hdmf-experimental.nwb.language"], + "name": "hdmf-experimental.experimental", + } +) + + +class EnumData(VectorData): + """ + Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute. 
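+    e.g. a ``value`` of ``[0, 1, 0]`` with ``elements`` of ``["a", "b"]`` encodes ``["a", "b", "a"]``.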
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-experimental.experimental", "tree_root": True} + ) + + name: str = Field(...) + elements: VectorData = Field( + ..., + description="""Reference to the VectorData object that contains the enumerable elements""", + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +EnumData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py new file mode 100644 index 0000000..a9ea6d5 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py @@ -0,0 +1,199 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_5_1.hdmf_common_base import Container, Data + +metamodel_version = "None" +version = "0.2.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental.resources/", + "id": "hdmf-experimental.resources", + "imports": ["../../hdmf_common/v1_5_1/namespace", "hdmf-experimental.nwb.language"], + "name": "hdmf-experimental.resources", + } +) + + +class ExternalResources(Container): + """ + A set of four tables for tracking external resource references in a file. NOTE: this data type is in beta testing and is subject to change in a later version. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-experimental.resources", "tree_root": True} + ) + + name: str = Field(...) 
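+    # The tables below cross-reference each other by row index: e.g.
+    # ``entities.keys_idx`` points into ``keys``, and ``object_keys`` joins
+    # ``objects`` to ``keys``.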
+ keys: ExternalResourcesKeys = Field( + ..., + description="""A table for storing user terms that are used to refer to external resources.""", + ) + entities: ExternalResourcesEntities = Field( + ..., description="""A table for mapping user terms (i.e., keys) to resource entities.""" + ) + resources: ExternalResourcesResources = Field( + ..., description="""A table for mapping user terms (i.e., keys) to resource entities.""" + ) + objects: ExternalResourcesObjects = Field( + ..., + description="""A table for identifying which objects in a file contain references to external resources.""", + ) + object_keys: ExternalResourcesObjectKeys = Field( + ..., description="""A table for identifying which objects use which keys.""" + ) + + +class ExternalResourcesKeys(Data): + """ + A table for storing user terms that are used to refer to external resources. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["keys"] = Field( + "keys", + json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, + ) + key: str = Field( + ..., + description="""The user term that maps to one or more resources in the 'resources' table.""", + ) + + +class ExternalResourcesEntities(Data): + """ + A table for mapping user terms (i.e., keys) to resource entities. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["entities"] = Field( + "entities", + json_schema_extra={ + "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"} + }, + ) + keys_idx: int = Field(..., description="""The index to the key in the 'keys' table.""") + resources_idx: int = Field(..., description="""The index into the 'resources' table""") + entity_id: str = Field(..., description="""The unique identifier entity.""") + entity_uri: str = Field( + ..., + description="""The URI for the entity this reference applies to. This can be an empty string.""", + ) + + +class ExternalResourcesResources(Data): + """ + A table for mapping user terms (i.e., keys) to resource entities. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["resources"] = Field( + "resources", + json_schema_extra={ + "linkml_meta": {"equals_string": "resources", "ifabsent": "string(resources)"} + }, + ) + resource: str = Field(..., description="""The name of the resource.""") + resource_uri: str = Field( + ..., description="""The URI for the resource. This can be an empty string.""" + ) + + +class ExternalResourcesObjects(Data): + """ + A table for identifying which objects in a file contain references to external resources. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["objects"] = Field( + "objects", + json_schema_extra={ + "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} + }, + ) + object_id: str = Field(..., description="""The UUID for the object.""") + relative_path: str = Field( + ..., + description="""The relative path from the container with the object_id to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the container is a dataset which contains the value(s) that is associated with an external resource.""", + ) + field: str = Field( + ..., + description="""The field of the compound data type using an external resource. 
This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""", + ) + + +class ExternalResourcesObjectKeys(Data): + """ + A table for identifying which objects use which keys. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["object_keys"] = Field( + "object_keys", + json_schema_extra={ + "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} + }, + ) + objects_idx: int = Field( + ..., description="""The index to the 'objects' table for the object that holds the key.""" + ) + keys_idx: int = Field(..., description="""The index to the 'keys' table for the key.""") + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +ExternalResources.model_rebuild() +ExternalResourcesKeys.model_rebuild() +ExternalResourcesEntities.model_rebuild() +ExternalResourcesResources.model_rebuild() +ExternalResourcesObjects.model_rebuild() +ExternalResourcesObjectKeys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py new file mode 100644 index 0000000..1345536 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py @@ -0,0 +1,89 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_experimental.v0_2_0.hdmf_experimental_resources import ( + ExternalResources, + ExternalResourcesKeys, + ExternalResourcesEntities, + ExternalResourcesResources, + ExternalResourcesObjects, + ExternalResourcesObjectKeys, +) +from ...hdmf_common.v1_5_1.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_5_1.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) +from ...hdmf_experimental.v0_2_0.hdmf_experimental_experimental import EnumData + +metamodel_version = "None" +version = "0.2.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental/", + "description": ( + "Experimental data structures provided by HDMF. 
These are not " + "guaranteed to be available in the future." + ), + "id": "hdmf-experimental", + "imports": [ + "hdmf-experimental.experimental", + "hdmf-experimental.resources", + "hdmf-experimental.nwb.language", + ], + "name": "hdmf-experimental", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py new file mode 100644 index 0000000..92d347a --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py @@ -0,0 +1,91 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_6_0.hdmf_common_table import VectorData +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "0.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental.experimental/", + "id": "hdmf-experimental.experimental", + "imports": ["../../hdmf_common/v1_6_0/namespace", "hdmf-experimental.nwb.language"], + "name": "hdmf-experimental.experimental", + } +) + + +class EnumData(VectorData): + """ + Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-experimental.experimental", "tree_root": True} + ) + + name: str = Field(...) 
+ elements: VectorData = Field( + ..., + description="""Reference to the VectorData object that contains the enumerable elements""", + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +EnumData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py new file mode 100644 index 0000000..c2fec76 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py @@ -0,0 +1,203 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_6_0.hdmf_common_base import Container, Data + +metamodel_version = "None" +version = "0.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental.resources/", + "id": "hdmf-experimental.resources", + "imports": ["../../hdmf_common/v1_6_0/namespace", "hdmf-experimental.nwb.language"], + "name": "hdmf-experimental.resources", + } +) + + +class ExternalResources(Container): + """ + A set of five tables for tracking external resource references in a file. NOTE: this data type is experimental and is subject to change in a later version. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-experimental.resources", "tree_root": True} + ) + + name: str = Field(...) 
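+    # Illustrative summary of how the five tables join (per the row-index fields
+    # documented below): object_keys pairs objects_idx with keys_idx, entities
+    # resolves keys_idx to an (entity_id, entity_uri) pair, and objects points to
+    # its containing file via files_idx into the files table.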
+    keys: ExternalResourcesKeys = Field(
+        ...,
+        description="""A table for storing user terms that are used to refer to external resources.""",
+    )
+    files: ExternalResourcesFiles = Field(
+        ..., description="""A table for storing object ids of files used in external resources."""
+    )
+    entities: ExternalResourcesEntities = Field(
+        ..., description="""A table for mapping user terms (i.e., keys) to resource entities."""
+    )
+    objects: ExternalResourcesObjects = Field(
+        ...,
+        description="""A table for identifying which objects in a file contain references to external resources.""",
+    )
+    object_keys: ExternalResourcesObjectKeys = Field(
+        ..., description="""A table for identifying which objects use which keys."""
+    )
+
+
+class ExternalResourcesKeys(Data):
+    """
+    A table for storing user terms that are used to refer to external resources.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+    name: Literal["keys"] = Field(
+        "keys",
+        json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}},
+    )
+    key: str = Field(
+        ...,
+        description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""",
+    )
+
+
+class ExternalResourcesFiles(Data):
+    """
+    A table for storing object ids of files used in external resources.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+    name: Literal["files"] = Field(
+        "files",
+        json_schema_extra={"linkml_meta": {"equals_string": "files", "ifabsent": "string(files)"}},
+    )
+    file_object_id: str = Field(
+        ...,
+        description="""The object id (UUID) of a file that contains objects that refer to external resources.""",
+    )
+
+
+class ExternalResourcesEntities(Data):
+    """
+    A table for mapping user terms (i.e., keys) to resource entities.
+    """
+
+    linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"})
+
+    name: Literal["entities"] = Field(
+        "entities",
+        json_schema_extra={
+            "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"}
+        },
+    )
+    keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""")
+    entity_id: str = Field(
+        ...,
+        description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""",
+    )
+    entity_uri: str = Field(
+        ...,
+        description="""The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""",
+    )
+
+
+class ExternalResourcesObjects(Data):
+    """
+    A table for identifying which objects in a file contain references to external resources.
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["objects"] = Field( + "objects", + json_schema_extra={ + "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} + }, + ) + files_idx: int = Field( + ..., description="""The row index to the file in the `files` table containing the object.""" + ) + object_id: str = Field(..., description="""The object id (UUID) of the object.""") + object_type: str = Field(..., description="""The data type of the object.""") + relative_path: str = Field( + ..., + description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""", + ) + field: str = Field( + ..., + description="""The field within the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""", + ) + + +class ExternalResourcesObjectKeys(Data): + """ + A table for identifying which objects use which keys. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["object_keys"] = Field( + "object_keys", + json_schema_extra={ + "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} + }, + ) + objects_idx: int = Field( + ..., description="""The row index to the object in the `objects` table that holds the key""" + ) + keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""") + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +ExternalResources.model_rebuild() +ExternalResourcesKeys.model_rebuild() +ExternalResourcesFiles.model_rebuild() +ExternalResourcesEntities.model_rebuild() +ExternalResourcesObjects.model_rebuild() +ExternalResourcesObjectKeys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py new file mode 100644 index 0000000..8361004 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py @@ -0,0 +1,89 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_experimental.v0_3_0.hdmf_experimental_resources import ( + ExternalResources, + ExternalResourcesKeys, + ExternalResourcesFiles, + ExternalResourcesEntities, + ExternalResourcesObjects, + ExternalResourcesObjectKeys, +) +from ...hdmf_common.v1_6_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_6_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) +from ...hdmf_experimental.v0_3_0.hdmf_experimental_experimental import EnumData + +metamodel_version = "None" +version = "0.3.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + 
validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental/", + "description": ( + "Experimental data structures provided by HDMF. These are not " + "guaranteed to be available in the future." + ), + "id": "hdmf-experimental", + "imports": [ + "hdmf-experimental.experimental", + "hdmf-experimental.resources", + "hdmf-experimental.nwb.language", + ], + "name": "hdmf-experimental", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/__init__.py @@ -0,0 +1 @@ + diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py new file mode 100644 index 0000000..0402b68 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py @@ -0,0 +1,91 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_7_0.hdmf_common_table import VectorData +from numpydantic import NDArray, Shape + +metamodel_version = "None" +version = "0.4.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +NUMPYDANTIC_VERSION = "1.2.1" +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": 
"hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental.experimental/", + "id": "hdmf-experimental.experimental", + "imports": ["../../hdmf_common/v1_7_0/namespace", "hdmf-experimental.nwb.language"], + "name": "hdmf-experimental.experimental", + } +) + + +class EnumData(VectorData): + """ + Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-experimental.experimental", "tree_root": True} + ) + + name: str = Field(...) + elements: VectorData = Field( + ..., + description="""Reference to the VectorData object that contains the enumerable elements""", + ) + description: str = Field(..., description="""Description of what these vectors represent.""") + value: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +EnumData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py new file mode 100644 index 0000000..f0bb510 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py @@ -0,0 +1,225 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_common.v1_7_0.hdmf_common_base import Container, Data + +metamodel_version = "None" +version = "0.4.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": False}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental.resources/", + "id": "hdmf-experimental.resources", + "imports": ["../../hdmf_common/v1_7_0/namespace", "hdmf-experimental.nwb.language"], + "name": "hdmf-experimental.resources", + } +) + + +class ExternalResources(Container): + """ + A set of five tables for tracking external resource references in a file. NOTE: this data type is experimental and is subject to change in a later version. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta( + {"from_schema": "hdmf-experimental.resources", "tree_root": True} + ) + + name: str = Field(...) + keys: ExternalResourcesKeys = Field( + ..., + description="""A table for storing user terms that are used to refer to external resources.""", + ) + files: ExternalResourcesFiles = Field( + ..., description="""A table for storing object ids of files used in external resources.""" + ) + entities: ExternalResourcesEntities = Field( + ..., description="""A table for mapping user terms (i.e., keys) to resource entities.""" + ) + objects: ExternalResourcesObjects = Field( + ..., + description="""A table for identifying which objects in a file contain references to external resources.""", + ) + object_keys: ExternalResourcesObjectKeys = Field( + ..., description="""A table for identifying which objects use which keys.""" + ) + entity_keys: ExternalResourcesEntityKeys = Field( + ..., description="""A table for identifying which keys use which entity.""" + ) + + +class ExternalResourcesKeys(Data): + """ + A table for storing user terms that are used to refer to external resources. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["keys"] = Field( + "keys", + json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, + ) + key: str = Field( + ..., + description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""", + ) + + +class ExternalResourcesFiles(Data): + """ + A table for storing object ids of files used in external resources. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["files"] = Field( + "files", + json_schema_extra={"linkml_meta": {"equals_string": "files", "ifabsent": "string(files)"}}, + ) + file_object_id: str = Field( + ..., + description="""The object id (UUID) of a file that contains objects that refers to external resources.""", + ) + + +class ExternalResourcesEntities(Data): + """ + A table for mapping user terms (i.e., keys) to resource entities. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["entities"] = Field( + "entities", + json_schema_extra={ + "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"} + }, + ) + entity_id: str = Field( + ..., + description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""", + ) + entity_uri: str = Field( + ..., + description="""The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""", + ) + + +class ExternalResourcesObjects(Data): + """ + A table for identifying which objects in a file contain references to external resources. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["objects"] = Field( + "objects", + json_schema_extra={ + "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} + }, + ) + files_idx: int = Field( + ..., description="""The row index to the file in the `files` table containing the object.""" + ) + object_id: str = Field(..., description="""The object id (UUID) of the object.""") + object_type: str = Field(..., description="""The data type of the object.""") + relative_path: str = Field( + ..., + description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""", + ) + field: str = Field( + ..., + description="""The field within the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""", + ) + + +class ExternalResourcesObjectKeys(Data): + """ + A table for identifying which objects use which keys. + """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["object_keys"] = Field( + "object_keys", + json_schema_extra={ + "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} + }, + ) + objects_idx: int = Field( + ..., description="""The row index to the object in the `objects` table that holds the key""" + ) + keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""") + + +class ExternalResourcesEntityKeys(Data): + """ + A table for identifying which keys use which entity. 
+ """ + + linkml_meta: ClassVar[LinkMLMeta] = LinkMLMeta({"from_schema": "hdmf-experimental.resources"}) + + name: Literal["entity_keys"] = Field( + "entity_keys", + json_schema_extra={ + "linkml_meta": {"equals_string": "entity_keys", "ifabsent": "string(entity_keys)"} + }, + ) + entities_idx: int = Field( + ..., description="""The row index to the entity in the `entities` table.""" + ) + keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""") + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +ExternalResources.model_rebuild() +ExternalResourcesKeys.model_rebuild() +ExternalResourcesFiles.model_rebuild() +ExternalResourcesEntities.model_rebuild() +ExternalResourcesObjects.model_rebuild() +ExternalResourcesObjectKeys.model_rebuild() +ExternalResourcesEntityKeys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py new file mode 100644 index 0000000..c642308 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py @@ -0,0 +1,90 @@ +from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator +import numpy as np +from ...hdmf_experimental.v0_4_0.hdmf_experimental_resources import ( + ExternalResources, + ExternalResourcesKeys, + ExternalResourcesFiles, + ExternalResourcesEntities, + ExternalResourcesObjects, + ExternalResourcesObjectKeys, + ExternalResourcesEntityKeys, +) +from ...hdmf_common.v1_7_0.hdmf_common_sparse import CSRMatrix, CSRMatrixData +from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container, SimpleMultiContainer +from ...hdmf_common.v1_7_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) +from ...hdmf_experimental.v0_4_0.hdmf_experimental_experimental import EnumData + +metamodel_version = "None" +version = "0.4.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="forbid", + arbitrary_types_allowed=True, + use_enum_values=True, + strict=False, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + +class LinkMLMeta(RootModel): + root: Dict[str, Any] = {} + model_config = ConfigDict(frozen=True) + + def __getattr__(self, key: str): + return getattr(self.root, key) + + def __getitem__(self, key: str): + return self.root[key] + + def __setitem__(self, key: str, value): + self.root[key] = value + + def __contains__(self, key: str) -> bool: + return key in self.root + + +linkml_meta = LinkMLMeta( + { + "annotations": { + "is_namespace": {"tag": "is_namespace", "value": True}, + "namespace": {"tag": "namespace", "value": "hdmf-experimental"}, + }, + "default_prefix": "hdmf-experimental/", + "description": ( + "Experimental data structures provided by HDMF. These are not " + "guaranteed to be available in the future." 
+ ), + "id": "hdmf-experimental", + "imports": [ + "hdmf-experimental.experimental", + "hdmf-experimental.resources", + "hdmf-experimental.nwb.language", + ], + "name": "hdmf-experimental", + } +) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml index c7a8401..50aeafe 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_0/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-common description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml index c7a8401..50aeafe 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_2/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-common description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml index c7a8401..50aeafe 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_1_3/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-common description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml new file mode 100644 index 0000000..ff30beb --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.base.yaml @@ -0,0 +1,33 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.2.0 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. 
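+    # Editorial note (not part of the upstream schema): Data and Container are the
+    # abstract roots of hdmf-common; e.g., DynamicTable (hdmf-common.table) declares
+    # `is_a: Container`.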
+ attributes: + name: + name: name + range: string + required: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..50aeafe --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.nwb.language.yaml @@ -0,0 +1,94 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-common +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + float64: + name: float64 + typeof: double + long: + name: long + typeof: integer + int64: + name: int64 + typeof: integer + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + int16: + name: int16 + typeof: integer + short: + name: short + typeof: integer + int8: + name: int8 + typeof: integer + uint: + name: uint + typeof: integer + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + minimum_value: 0 + numeric: + name: numeric + typeof: float + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml new file mode 100644 index 0000000..c32033d --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.sparse.yaml @@ -0,0 +1,81 @@ +name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.2.0 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: a compressed sparse row matrix + attributes: + name: + name: name + range: string + required: true + shape: + name: shape + description: the shape of this sparse matrix + array: + dimensions: + - alias: 'null' + exact_cardinality: 2 + range: int + required: true + multivalued: false + indices: + name: indices + description: column indices + range: CSRMatrix__indices + required: true + multivalued: false + indptr: + name: indptr + description: index pointer + range: CSRMatrix__indptr + required: true + multivalued: false + data: + name: data + description: values in the matrix + range: CSRMatrix__data + required: true + multivalued: false + tree_root: true + CSRMatrix__indices: + name: CSRMatrix__indices + description: column indices + attributes: + name: + name: name + ifabsent: string(indices) + range: string + required: true + equals_string: indices + CSRMatrix__indptr: + 
name: CSRMatrix__indptr + description: index pointer + attributes: + name: + name: name + ifabsent: string(indptr) + range: string + required: true + equals_string: indptr + CSRMatrix__data: + name: CSRMatrix__data + description: values in the matrix + attributes: + name: + name: name + ifabsent: string(data) + range: string + required: true + equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml new file mode 100644 index 0000000..7746e8e --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/hdmf-common.table.yaml @@ -0,0 +1,193 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.2.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + name: + name: name + range: string + required: true + description: + name: description + description: Description of what these vectors represent. + range: text + required: true + value: + name: value + range: AnyType + any_of: + - array: + dimensions: + - alias: dim0 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - alias: dim3 + tree_root: true + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + required: true + tree_root: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. + is_a: Data + attributes: + name: + name: name + ifabsent: string(element_id) + range: string + required: true + tree_root: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. 
They can also
+      be used to create hierarchical relationships between multiple `DynamicTable`s.
+      `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
+      ragged references, so a single cell of a `DynamicTable` can reference many rows
+      of another `DynamicTable`.
+    is_a: VectorData
+    attributes:
+      name:
+        name: name
+        range: string
+        required: true
+      table:
+        name: table
+        description: Reference to the DynamicTable object that this region applies
+          to.
+        range: DynamicTable
+        required: true
+      description:
+        name: description
+        description: Description of what this table region points to.
+        range: text
+        required: true
+    tree_root: true
+  VocabData:
+    name: VocabData
+    description: Data that come from a controlled vocabulary of text values. A data
+      value of i corresponds to the i-th element in the 'vocabulary' array attribute.
+    is_a: VectorData
+    attributes:
+      name:
+        name: name
+        range: string
+        required: true
+      vocabulary:
+        name: vocabulary
+        description: The available items in the controlled vocabulary.
+        array:
+          dimensions:
+          - alias: 'null'
+        range: text
+        required: true
+        multivalued: false
+    tree_root: true
+  DynamicTable:
+    name: DynamicTable
+    description: A group containing multiple datasets that are aligned on the first
+      dimension (Currently, this requirement is left up to APIs to check and enforce).
+      These datasets represent different columns in the table. Apart from a column
+      that contains unique identifiers for each row, there are no other required datasets.
+      Users are free to add any number of custom VectorData objects (columns) here.
+      DynamicTable also supports ragged array columns, where each element can be of
+      a different size. To add a ragged array column, use a VectorIndex type to index
+      the corresponding VectorData type. See documentation for VectorData and VectorIndex
+      for more details. Unlike a compound data type, which is analogous to storing
+      an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays.
+      This provides an alternative structure to choose from when optimizing storage
+      for anticipated access patterns. Additionally, this type provides a way of creating
+      a table without having to define a compound type up front. Although this convenience
+      may be attractive, users should think carefully about how data will be accessed.
+      DynamicTable is more appropriate for column-centric access, whereas a dataset
+      with a compound type would be more appropriate for row-centric access. Finally,
+      data size should also be taken into account. For small tables, performance loss
+      may be an acceptable trade-off for the flexibility of a DynamicTable.
+    is_a: Container
+    attributes:
+      name:
+        name: name
+        range: string
+        required: true
+      colnames:
+        name: colnames
+        description: The names of the columns in this table. This should be used to
+          specify an order to the columns.
+        range: text
+        required: true
+        multivalued: true
+      description:
+        name: description
+        description: Description of what is in this dynamic table.
+        range: text
+        required: true
+      id:
+        name: id
+        description: Array of unique identifiers for the rows of this dynamic table.
+        array:
+          dimensions:
+          - alias: num_rows
+        range: int
+        required: true
+        multivalued: false
+      vector_data:
+        name: vector_data
+        description: Vector columns, including index columns, of this dynamic table. 
+ range: VectorData + required: false + multivalued: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml new file mode 100644 index 0000000..7befc87 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.2.0 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml new file mode 100644 index 0000000..9ef70fc --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.base.yaml @@ -0,0 +1,46 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.2.1 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. + attributes: + name: + name: name + range: string + required: true + tree_root: true + SimpleMultiContainer: + name: SimpleMultiContainer + description: A simple Container for holding onto multiple containers + is_a: Container + attributes: + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..50aeafe --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.nwb.language.yaml @@ -0,0 +1,94 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-common +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + float64: + name: float64 + typeof: double + long: + name: long + typeof: integer + int64: + name: int64 + typeof: integer + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + int16: + name: int16 + typeof: integer + short: + name: short + typeof: integer + int8: + name: int8 + typeof: integer + uint: + name: uint + typeof: integer + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + minimum_value: 0 + numeric: + name: numeric + typeof: float + text: + 
name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml new file mode 100644 index 0000000..3168d8a --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.sparse.yaml @@ -0,0 +1,83 @@ +name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.2.1 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: a compressed sparse row matrix + is_a: Container + attributes: + name: + name: name + range: string + required: true + shape: + name: shape + description: the shape of this sparse matrix + array: + dimensions: + - alias: 'null' + exact_cardinality: 2 + range: int + required: true + multivalued: false + indices: + name: indices + description: column indices + range: CSRMatrix__indices + required: true + multivalued: false + indptr: + name: indptr + description: index pointer + range: CSRMatrix__indptr + required: true + multivalued: false + data: + name: data + description: values in the matrix + range: CSRMatrix__data + required: true + multivalued: false + tree_root: true + CSRMatrix__indices: + name: CSRMatrix__indices + description: column indices + attributes: + name: + name: name + ifabsent: string(indices) + range: string + required: true + equals_string: indices + CSRMatrix__indptr: + name: CSRMatrix__indptr + description: index pointer + attributes: + name: + name: name + ifabsent: string(indptr) + range: string + required: true + equals_string: indptr + CSRMatrix__data: + name: CSRMatrix__data + description: values in the matrix + attributes: + name: + name: name + ifabsent: string(data) + range: string + required: true + equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml new file mode 100644 index 0000000..2ce11ab --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/hdmf-common.table.yaml @@ -0,0 +1,193 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.2.1 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. 
The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]],
+      and so on.
+    is_a: Data
+    attributes:
+      name:
+        name: name
+        range: string
+        required: true
+      description:
+        name: description
+        description: Description of what these vectors represent.
+        range: text
+        required: true
+      value:
+        name: value
+        range: AnyType
+        any_of:
+        - array:
+            dimensions:
+            - alias: dim0
+        - array:
+            dimensions:
+            - alias: dim0
+            - alias: dim1
+        - array:
+            dimensions:
+            - alias: dim0
+            - alias: dim1
+            - alias: dim2
+        - array:
+            dimensions:
+            - alias: dim0
+            - alias: dim1
+            - alias: dim2
+            - alias: dim3
+    tree_root: true
+  VectorIndex:
+    name: VectorIndex
+    description: Used with VectorData to encode a ragged array. An array of indices
+      into the first dimension of the target VectorData, and forming a map between
+      the rows of a DynamicTable and the indices of the VectorData. The name of the
+      VectorIndex is expected to be the name of the target VectorData object followed
+      by "_index".
+    is_a: VectorData
+    attributes:
+      name:
+        name: name
+        range: string
+        required: true
+      target:
+        name: target
+        description: Reference to the target dataset that this index applies to.
+        range: VectorData
+        required: true
+    tree_root: true
+  ElementIdentifiers:
+    name: ElementIdentifiers
+    description: A list of unique identifiers for values within a dataset, e.g. rows
+      of a DynamicTable.
+    is_a: Data
+    attributes:
+      name:
+        name: name
+        ifabsent: string(element_id)
+        range: string
+        required: true
+    tree_root: true
+  DynamicTableRegion:
+    name: DynamicTableRegion
+    description: DynamicTableRegion provides a link from one table to an index or
+      region of another. The `table` attribute is a link to another `DynamicTable`,
+      indicating which table is referenced, and the data is int(s) indicating the
+      row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to
+      associate rows with repeated meta-data without data duplication. They can also
+      be used to create hierarchical relationships between multiple `DynamicTable`s.
+      `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create
+      ragged references, so a single cell of a `DynamicTable` can reference many rows
+      of another `DynamicTable`.
+    is_a: VectorData
+    attributes:
+      name:
+        name: name
+        range: string
+        required: true
+      table:
+        name: table
+        description: Reference to the DynamicTable object that this region applies
+          to.
+        range: DynamicTable
+        required: true
+      description:
+        name: description
+        description: Description of what this table region points to.
+        range: text
+        required: true
+    tree_root: true
+  VocabData:
+    name: VocabData
+    description: Data that come from a controlled vocabulary of text values. A data
+      value of i corresponds to the i-th element in the 'vocabulary' array attribute.
+    is_a: VectorData
+    attributes:
+      name:
+        name: name
+        range: string
+        required: true
+      vocabulary:
+        name: vocabulary
+        description: The available items in the controlled vocabulary.
+        array:
+          dimensions:
+          - alias: 'null'
+        range: text
+        required: true
+        multivalued: false
+    tree_root: true
+  DynamicTable:
+    name: DynamicTable
+    description: A group containing multiple datasets that are aligned on the first
+      dimension (Currently, this requirement is left up to APIs to check and enforce).
+      These datasets represent different columns in the table. Apart from a column
+      that contains unique identifiers for each row, there are no other required datasets.
+      Users are free to add any number of custom VectorData objects (columns) here. 
+ DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. See documentation for VectorData and VectorIndex + for more details. Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. + is_a: Container + attributes: + name: + name: name + range: string + required: true + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + required: true + multivalued: true + description: + name: description + description: Description of what is in this dynamic table. + range: text + required: true + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + array: + dimensions: + - alias: num_rows + range: int + required: true + multivalued: false + vector_data: + name: vector_data + description: Vector columns, including index columns, of this dynamic table. + range: VectorData + required: false + multivalued: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml new file mode 100644 index 0000000..e29bfb9 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_2_1/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.2.1 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml new file mode 100644 index 0000000..1cfb2bc --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.base.yaml @@ -0,0 +1,46 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.3.0 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. 
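+    # Editorial note (not part of the upstream schema): SimpleMultiContainer (below)
+    # and CSRMatrix (hdmf-common.sparse) derive from Container via `is_a`; dataset
+    # types such as VectorData derive from Data.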
+ attributes: + name: + name: name + range: string + required: true + tree_root: true + SimpleMultiContainer: + name: SimpleMultiContainer + description: A simple Container for holding onto multiple containers. + is_a: Container + attributes: + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..50aeafe --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.nwb.language.yaml @@ -0,0 +1,94 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-common +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + float64: + name: float64 + typeof: double + long: + name: long + typeof: integer + int64: + name: int64 + typeof: integer + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + int16: + name: int16 + typeof: integer + short: + name: short + typeof: integer + int8: + name: int8 + typeof: integer + uint: + name: uint + typeof: integer + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + minimum_value: 0 + numeric: + name: numeric + typeof: float + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml new file mode 100644 index 0000000..918a6a5 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml @@ -0,0 +1,158 @@ +name: hdmf-common.resources +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.resources +version: 1.3.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.resources/ +classes: + ExternalResources: + name: ExternalResources + description: 'A set of four tables for tracking external resource references in + a file. NOTE: this data type is in beta testing and is subject to change in + a later version.' + is_a: Container + attributes: + name: + name: name + range: string + required: true + keys: + name: keys + description: A table for storing user terms that are used to refer to external + resources. 
+ range: ExternalResources__keys + required: true + multivalued: false + resources: + name: resources + description: A table for mapping user terms (i.e., keys) to resource entities. + range: ExternalResources__resources + required: true + multivalued: false + objects: + name: objects + description: A table for identifying which objects in a file contain references + to external resources. + range: ExternalResources__objects + required: true + multivalued: false + object_keys: + name: object_keys + description: A table for identifying which objects use which keys. + range: ExternalResources__object_keys + required: true + multivalued: false + tree_root: true + ExternalResources__keys: + name: ExternalResources__keys + description: A table for storing user terms that are used to refer to external + resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(keys) + range: string + required: true + equals_string: keys + key_name: + name: key_name + description: The user term that maps to one or more resources in the 'resources' + table. + range: text + required: true + multivalued: false + ExternalResources__resources: + name: ExternalResources__resources + description: A table for mapping user terms (i.e., keys) to resource entities. + is_a: Data + attributes: + name: + name: name + ifabsent: string(resources) + range: string + required: true + equals_string: resources + keytable_idx: + name: keytable_idx + description: The index to the key in the 'keys' table. + range: uint + required: true + multivalued: false + resource_name: + name: resource_name + description: The name of the online resource (e.g., website, database) that + has the entity. + range: text + required: true + multivalued: false + resource_id: + name: resource_id + description: The unique identifier for the resource entity at the resource. + range: text + required: true + multivalued: false + uri: + name: uri + description: The URI for the resource entity this reference applies to. This + can be an empty string. + range: text + required: true + multivalued: false + ExternalResources__objects: + name: ExternalResources__objects + description: A table for identifying which objects in a file contain references + to external resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(objects) + range: string + required: true + equals_string: objects + object_id: + name: object_id + description: The UUID for the object. + range: text + required: true + multivalued: false + field: + name: field + description: The field of the object. This can be an empty string if the object + is a dataset and the field is the dataset values. + range: text + required: true + multivalued: false + ExternalResources__object_keys: + name: ExternalResources__object_keys + description: A table for identifying which objects use which keys. + is_a: Data + attributes: + name: + name: name + ifabsent: string(object_keys) + range: string + required: true + equals_string: object_keys + objecttable_idx: + name: objecttable_idx + description: The index to the 'objects' table for the object that holds the + key. + range: uint + required: true + multivalued: false + keytable_idx: + name: keytable_idx + description: The index to the 'keys' table for the key. 
+ range: uint + required: true + multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml new file mode 100644 index 0000000..55db34f --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.sparse.yaml @@ -0,0 +1,68 @@ +name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.3.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: A compressed sparse row matrix. Data are stored in the standard CSR + format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] + and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + is_a: Container + attributes: + name: + name: name + range: string + required: true + shape: + name: shape + description: The shape (number of rows, number of columns) of this sparse + matrix. + range: uint + required: true + multivalued: true + indices: + name: indices + description: The column indices. + array: + dimensions: + - alias: number_of_non_zero_values + range: uint + required: true + multivalued: false + indptr: + name: indptr + description: The row index pointer. + array: + dimensions: + - alias: number_of_rows_in_the_matrix_1 + range: uint + required: true + multivalued: false + data: + name: data + description: The non-zero values in the matrix. + range: CSRMatrix__data + required: true + multivalued: false + tree_root: true + CSRMatrix__data: + name: CSRMatrix__data + description: The non-zero values in the matrix. + attributes: + name: + name: name + ifabsent: string(data) + range: string + required: true + equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml new file mode 100644 index 0000000..cae8e9e --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.table.yaml @@ -0,0 +1,193 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.3.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + name: + name: name + range: string + required: true + description: + name: description + description: Description of what these vectors represent. 
+ range: text + required: true + value: + name: value + range: AnyType + any_of: + - array: + dimensions: + - alias: dim0 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - alias: dim3 + tree_root: true + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + required: true + tree_root: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. + is_a: Data + attributes: + name: + name: name + ifabsent: string(element_id) + range: string + required: true + tree_root: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. They can also + be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create + ragged references, so a single cell of a `DynamicTable` can reference many rows + of another `DynamicTable`. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + table: + name: table + description: Reference to the DynamicTable object that this region applies + to. + range: DynamicTable + required: true + description: + name: description + description: Description of what this table region points to. + range: text + required: true + tree_root: true + VocabData: + name: VocabData + description: Data that come from a controlled vocabulary of text values. A data + value of i corresponds to the i-th element in the 'vocabulary' array attribute. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + vocabulary: + name: vocabulary + description: The available items in the controlled vocabulary. + array: + dimensions: + - alias: 'null' + range: text + required: true + multivalued: false + tree_root: true + DynamicTable: + name: DynamicTable + description: A group containing multiple datasets that are aligned on the first + dimension (Currently, this requirement if left up to APIs to check and enforce). + These datasets represent different columns in the table. Apart from a column + that contains unique identifiers for each row, there are no other required datasets. + Users are free to add any number of custom VectorData objects (columns) here. + DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. 
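
As an illustrative aside, the slicing rule quoted in the VectorData description above (the first vector at VectorData[0:VectorIndex[0]], the second at VectorData[VectorIndex[0]:VectorIndex[1]], and so on) can be sketched with plain numpy; the names `vector_data`, `vector_index`, and `cell` are hypothetical and not part of the generated schema:

```python
import numpy as np

# Three ragged "cells" stored flat: [1, 2], [3], [4, 5, 6]
vector_data = np.array([1, 2, 3, 4, 5, 6])
# Each entry is the exclusive end offset of the corresponding cell,
# so cell i spans vector_data[vector_index[i-1]:vector_index[i]]
vector_index = np.array([2, 3, 6])

def cell(i: int) -> np.ndarray:
    start = 0 if i == 0 else vector_index[i - 1]
    return vector_data[start:vector_index[i]]

assert cell(0).tolist() == [1, 2]
assert cell(1).tolist() == [3]
assert cell(2).tolist() == [4, 5, 6]
```

This offsets-into-flat-storage pattern is what a VectorIndex column encodes for its target VectorData.
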
See documentation for VectorData and VectorIndex + for more details. Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. + is_a: Container + attributes: + name: + name: name + range: string + required: true + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + required: true + multivalued: true + description: + name: description + description: Description of what is in this dynamic table. + range: text + required: true + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + array: + dimensions: + - alias: num_rows + range: int + required: true + multivalued: false + vector_data: + name: vector_data + description: Vector columns, including index columns, of this dynamic table. + range: VectorData + required: false + multivalued: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml new file mode 100644 index 0000000..11885e7 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/namespace.yaml @@ -0,0 +1,18 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.3.0 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.resources +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml new file mode 100644 index 0000000..6495eb4 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.base.yaml @@ -0,0 +1,46 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.4.0 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. + attributes: + name: + name: name + range: string + required: true + tree_root: true + SimpleMultiContainer: + name: SimpleMultiContainer + description: A simple Container for holding onto multiple containers. 
+ is_a: Container + attributes: + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..e3d3df3 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.nwb.language.yaml @@ -0,0 +1,94 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-experimental +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + float64: + name: float64 + typeof: double + long: + name: long + typeof: integer + int64: + name: int64 + typeof: integer + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + int16: + name: int16 + typeof: integer + short: + name: short + typeof: integer + int8: + name: int8 + typeof: integer + uint: + name: uint + typeof: integer + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + minimum_value: 0 + numeric: + name: numeric + typeof: float + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml new file mode 100644 index 0000000..13b5f58 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.sparse.yaml @@ -0,0 +1,68 @@ +name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.4.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: A compressed sparse row matrix. Data are stored in the standard CSR + format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] + and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + is_a: Container + attributes: + name: + name: name + range: string + required: true + shape: + name: shape + description: The shape (number of rows, number of columns) of this sparse + matrix. + range: uint + required: true + multivalued: true + indices: + name: indices + description: The column indices. + array: + dimensions: + - alias: number_of_non_zero_values + range: uint + required: true + multivalued: false + indptr: + name: indptr + description: The row index pointer. 
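
A minimal numpy sketch of the CSR layout described in the CSRMatrix docstring above, where the column indices for row i live in indices[indptr[i]:indptr[i+1]] and the corresponding values in data[indptr[i]:indptr[i+1]]; the `row` helper and the example matrix are assumptions for illustration only:

```python
import numpy as np

# 3x4 matrix:  [[10,  0, 0, 20],
#               [ 0,  0, 0,  0],
#               [ 0, 30, 0,  0]]
data = np.array([10, 20, 30])    # non-zero values
indices = np.array([0, 3, 1])    # column index of each value
indptr = np.array([0, 2, 2, 3])  # row i occupies data[indptr[i]:indptr[i+1]]

def row(i: int, ncols: int = 4) -> np.ndarray:
    out = np.zeros(ncols, dtype=data.dtype)
    sl = slice(indptr[i], indptr[i + 1])
    out[indices[sl]] = data[sl]  # scatter row i's values into their columns
    return out

assert row(0).tolist() == [10, 0, 0, 20]
assert row(1).tolist() == [0, 0, 0, 0]
assert row(2).tolist() == [0, 30, 0, 0]
```
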
+ array: + dimensions: + - alias: number_of_rows_in_the_matrix_1 + range: uint + required: true + multivalued: false + data: + name: data + description: The non-zero values in the matrix. + range: CSRMatrix__data + required: true + multivalued: false + tree_root: true + CSRMatrix__data: + name: CSRMatrix__data + description: The non-zero values in the matrix. + attributes: + name: + name: name + ifabsent: string(data) + range: string + required: true + equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml new file mode 100644 index 0000000..a88c85f --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/hdmf-common.table.yaml @@ -0,0 +1,173 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.4.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + name: + name: name + range: string + required: true + description: + name: description + description: Description of what these vectors represent. + range: text + required: true + value: + name: value + range: AnyType + any_of: + - array: + dimensions: + - alias: dim0 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - alias: dim3 + tree_root: true + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + required: true + tree_root: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. + is_a: Data + attributes: + name: + name: name + ifabsent: string(element_id) + range: string + required: true + tree_root: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. 
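
To make the preceding sentence concrete: a DynamicTableRegion column reduces to a list of 0-indexed row numbers into the target table. A toy sketch with plain dicts, where the `electrodes` table and `region` values are hypothetical:

```python
# A DynamicTableRegion is, at bottom, integer row indices (0-indexed)
# into another table's columns; modeled here with plain dicts.
electrodes = {"x": [0.0, 1.0, 2.0], "group": ["a", "a", "b"]}
region = [2, 0]  # rows of `electrodes` referenced from another table

rows = [{col: vals[i] for col, vals in electrodes.items()} for i in region]
assert rows == [{"x": 2.0, "group": "b"}, {"x": 0.0, "group": "a"}]
```
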
`DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. They can also + be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create + ragged references, so a single cell of a `DynamicTable` can reference many rows + of another `DynamicTable`. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + table: + name: table + description: Reference to the DynamicTable object that this region applies + to. + range: DynamicTable + required: true + description: + name: description + description: Description of what this table region points to. + range: text + required: true + tree_root: true + DynamicTable: + name: DynamicTable + description: A group containing multiple datasets that are aligned on the first + dimension (Currently, this requirement if left up to APIs to check and enforce). + These datasets represent different columns in the table. Apart from a column + that contains unique identifiers for each row, there are no other required datasets. + Users are free to add any number of custom VectorData objects (columns) here. + DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. See documentation for VectorData and VectorIndex + for more details. Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. + is_a: Container + attributes: + name: + name: name + range: string + required: true + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + required: true + multivalued: true + description: + name: description + description: Description of what is in this dynamic table. + range: text + required: true + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + array: + dimensions: + - alias: num_rows + range: int + required: true + multivalued: false + vector_data: + name: vector_data + description: Vector columns, including index columns, of this dynamic table. 
+ range: VectorData + required: false + multivalued: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml new file mode 100644 index 0000000..50680da --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_4_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.4.0 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml index c7a8401..e3d3df3 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_0/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-experimental description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml new file mode 100644 index 0000000..ca0d043 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.base.yaml @@ -0,0 +1,46 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.5.1 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. + attributes: + name: + name: name + range: string + required: true + tree_root: true + SimpleMultiContainer: + name: SimpleMultiContainer + description: A simple Container for holding onto multiple containers. 
+ is_a: Container + attributes: + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..e3d3df3 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.nwb.language.yaml @@ -0,0 +1,94 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-experimental +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + float64: + name: float64 + typeof: double + long: + name: long + typeof: integer + int64: + name: int64 + typeof: integer + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + int16: + name: int16 + typeof: integer + short: + name: short + typeof: integer + int8: + name: int8 + typeof: integer + uint: + name: uint + typeof: integer + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + minimum_value: 0 + numeric: + name: numeric + typeof: float + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml new file mode 100644 index 0000000..21654df --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.sparse.yaml @@ -0,0 +1,68 @@ +name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.5.1 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: A compressed sparse row matrix. Data are stored in the standard CSR + format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] + and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + is_a: Container + attributes: + name: + name: name + range: string + required: true + shape: + name: shape + description: The shape (number of rows, number of columns) of this sparse + matrix. + range: uint + required: true + multivalued: true + indices: + name: indices + description: The column indices. + array: + dimensions: + - alias: number_of_non_zero_values + range: uint + required: true + multivalued: false + indptr: + name: indptr + description: The row index pointer. 
+ array: + dimensions: + - alias: number_of_rows_in_the_matrix_1 + range: uint + required: true + multivalued: false + data: + name: data + description: The non-zero values in the matrix. + range: CSRMatrix__data + required: true + multivalued: false + tree_root: true + CSRMatrix__data: + name: CSRMatrix__data + description: The non-zero values in the matrix. + attributes: + name: + name: name + ifabsent: string(data) + range: string + required: true + equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml new file mode 100644 index 0000000..3849f90 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/hdmf-common.table.yaml @@ -0,0 +1,192 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.5.1 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + name: + name: name + range: string + required: true + description: + name: description + description: Description of what these vectors represent. + range: text + required: true + value: + name: value + range: AnyType + any_of: + - array: + dimensions: + - alias: dim0 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - alias: dim3 + tree_root: true + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + required: true + tree_root: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. + is_a: Data + attributes: + name: + name: name + ifabsent: string(element_id) + range: string + required: true + tree_root: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. 
`DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. They can also + be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create + ragged references, so a single cell of a `DynamicTable` can reference many rows + of another `DynamicTable`. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + table: + name: table + description: Reference to the DynamicTable object that this region applies + to. + range: DynamicTable + required: true + description: + name: description + description: Description of what this table region points to. + range: text + required: true + tree_root: true + DynamicTable: + name: DynamicTable + description: A group containing multiple datasets that are aligned on the first + dimension (Currently, this requirement if left up to APIs to check and enforce). + These datasets represent different columns in the table. Apart from a column + that contains unique identifiers for each row, there are no other required datasets. + Users are free to add any number of custom VectorData objects (columns) here. + DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. See documentation for VectorData and VectorIndex + for more details. Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. + is_a: Container + attributes: + name: + name: name + range: string + required: true + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + required: true + multivalued: true + description: + name: description + description: Description of what is in this dynamic table. + range: text + required: true + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + array: + dimensions: + - alias: num_rows + range: int + required: true + multivalued: false + vector_data: + name: vector_data + description: Vector columns, including index columns, of this dynamic table. + range: VectorData + required: false + multivalued: true + tree_root: true + AlignedDynamicTable: + name: AlignedDynamicTable + description: DynamicTable container that supports storing a collection of sub-tables. + Each sub-table is a DynamicTable itself that is aligned with the main table + by row index. I.e., all DynamicTables stored in this group MUST have the same + number of rows. 
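
A toy check of the alignment invariant just stated, under the assumption that the main table and each category sub-table are modeled as plain Python lists and dicts (all names hypothetical):

```python
# Every category (sub-table) must have exactly as many rows as the
# main table; here a minimal validation of that invariant.
main_ids = [0, 1, 2]
categories = {
    "anatomy": {"region": ["V1", "V1", "LM"]},
    "quality": {"snr": [3.1, 2.7, 4.0]},
}

for cat, cols in categories.items():
    for name, values in cols.items():
        assert len(values) == len(main_ids), f"{cat}.{name} is misaligned"
```
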
This type effectively defines a 2-level table in which the main + data is stored in the main table implemented by this type and additional columns + of the table are grouped into categories, with each category being represented + by a separate DynamicTable stored within the group. + is_a: DynamicTable + attributes: + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: DynamicTable + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml new file mode 100644 index 0000000..917870d --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_5_1/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.5.1 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml new file mode 100644 index 0000000..293c18a --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.base.yaml @@ -0,0 +1,46 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.6.0 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. + attributes: + name: + name: name + range: string + required: true + tree_root: true + SimpleMultiContainer: + name: SimpleMultiContainer + description: A simple Container for holding onto multiple containers. 
+ is_a: Container + attributes: + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..e3d3df3 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.nwb.language.yaml @@ -0,0 +1,94 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-experimental +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + float64: + name: float64 + typeof: double + long: + name: long + typeof: integer + int64: + name: int64 + typeof: integer + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + int16: + name: int16 + typeof: integer + short: + name: short + typeof: integer + int8: + name: int8 + typeof: integer + uint: + name: uint + typeof: integer + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + minimum_value: 0 + numeric: + name: numeric + typeof: float + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml new file mode 100644 index 0000000..7ed736f --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.sparse.yaml @@ -0,0 +1,68 @@ +name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.6.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: A compressed sparse row matrix. Data are stored in the standard CSR + format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] + and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + is_a: Container + attributes: + name: + name: name + range: string + required: true + shape: + name: shape + description: The shape (number of rows, number of columns) of this sparse + matrix. + range: uint + required: true + multivalued: true + indices: + name: indices + description: The column indices. + array: + dimensions: + - alias: number_of_non_zero_values + range: uint + required: true + multivalued: false + indptr: + name: indptr + description: The row index pointer. 
+ array: + dimensions: + - alias: number_of_rows_in_the_matrix_1 + range: uint + required: true + multivalued: false + data: + name: data + description: The non-zero values in the matrix. + range: CSRMatrix__data + required: true + multivalued: false + tree_root: true + CSRMatrix__data: + name: CSRMatrix__data + description: The non-zero values in the matrix. + attributes: + name: + name: name + ifabsent: string(data) + range: string + required: true + equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml new file mode 100644 index 0000000..ea22ad5 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/hdmf-common.table.yaml @@ -0,0 +1,192 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.6.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + name: + name: name + range: string + required: true + description: + name: description + description: Description of what these vectors represent. + range: text + required: true + value: + name: value + range: AnyType + any_of: + - array: + dimensions: + - alias: dim0 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - alias: dim3 + tree_root: true + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + required: true + tree_root: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. + is_a: Data + attributes: + name: + name: name + ifabsent: string(element_id) + range: string + required: true + tree_root: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. 
`DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. They can also + be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create + ragged references, so a single cell of a `DynamicTable` can reference many rows + of another `DynamicTable`. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + table: + name: table + description: Reference to the DynamicTable object that this region applies + to. + range: DynamicTable + required: true + description: + name: description + description: Description of what this table region points to. + range: text + required: true + tree_root: true + DynamicTable: + name: DynamicTable + description: A group containing multiple datasets that are aligned on the first + dimension (Currently, this requirement if left up to APIs to check and enforce). + These datasets represent different columns in the table. Apart from a column + that contains unique identifiers for each row, there are no other required datasets. + Users are free to add any number of custom VectorData objects (columns) here. + DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. See documentation for VectorData and VectorIndex + for more details. Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. + is_a: Container + attributes: + name: + name: name + range: string + required: true + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + required: true + multivalued: true + description: + name: description + description: Description of what is in this dynamic table. + range: text + required: true + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + array: + dimensions: + - alias: num_rows + range: int + required: true + multivalued: false + vector_data: + name: vector_data + description: Vector columns, including index columns, of this dynamic table. + range: VectorData + required: false + multivalued: true + tree_root: true + AlignedDynamicTable: + name: AlignedDynamicTable + description: DynamicTable container that supports storing a collection of sub-tables. + Each sub-table is a DynamicTable itself that is aligned with the main table + by row index. I.e., all DynamicTables stored in this group MUST have the same + number of rows. 
This type effectively defines a 2-level table in which the main + data is stored in the main table implemented by this type and additional columns + of the table are grouped into categories, with each category being represented + by a separate DynamicTable stored within the group. + is_a: DynamicTable + attributes: + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: DynamicTable + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml new file mode 100644 index 0000000..241b849 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_6_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.6.0 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml new file mode 100644 index 0000000..1b7dcb9 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.base.yaml @@ -0,0 +1,46 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.7.0 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. + attributes: + name: + name: name + range: string + required: true + tree_root: true + SimpleMultiContainer: + name: SimpleMultiContainer + description: A simple Container for holding onto multiple containers. 
+ is_a: Container + attributes: + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..e3d3df3 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.nwb.language.yaml @@ -0,0 +1,94 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-experimental +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + float64: + name: float64 + typeof: double + long: + name: long + typeof: integer + int64: + name: int64 + typeof: integer + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + int16: + name: int16 + typeof: integer + short: + name: short + typeof: integer + int8: + name: int8 + typeof: integer + uint: + name: uint + typeof: integer + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + minimum_value: 0 + numeric: + name: numeric + typeof: float + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml new file mode 100644 index 0000000..6167b42 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.sparse.yaml @@ -0,0 +1,68 @@ +name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.7.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: A compressed sparse row matrix. Data are stored in the standard CSR + format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] + and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + is_a: Container + attributes: + name: + name: name + range: string + required: true + shape: + name: shape + description: The shape (number of rows, number of columns) of this sparse + matrix. + range: uint + required: true + multivalued: true + indices: + name: indices + description: The column indices. + array: + dimensions: + - alias: number_of_non_zero_values + range: uint + required: true + multivalued: false + indptr: + name: indptr + description: The row index pointer. 
+ array: + dimensions: + - alias: number_of_rows_in_the_matrix_1 + range: uint + required: true + multivalued: false + data: + name: data + description: The non-zero values in the matrix. + range: CSRMatrix__data + required: true + multivalued: false + tree_root: true + CSRMatrix__data: + name: CSRMatrix__data + description: The non-zero values in the matrix. + attributes: + name: + name: name + ifabsent: string(data) + range: string + required: true + equals_string: data diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml new file mode 100644 index 0000000..8149ebe --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/hdmf-common.table.yaml @@ -0,0 +1,192 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.7.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + name: + name: name + range: string + required: true + description: + name: description + description: Description of what these vectors represent. + range: text + required: true + value: + name: value + range: AnyType + any_of: + - array: + dimensions: + - alias: dim0 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - alias: dim3 + tree_root: true + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + required: true + tree_root: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. + is_a: Data + attributes: + name: + name: name + ifabsent: string(element_id) + range: string + required: true + tree_root: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. 
`DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. They can also + be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create + ragged references, so a single cell of a `DynamicTable` can reference many rows + of another `DynamicTable`. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + table: + name: table + description: Reference to the DynamicTable object that this region applies + to. + range: DynamicTable + required: true + description: + name: description + description: Description of what this table region points to. + range: text + required: true + tree_root: true + DynamicTable: + name: DynamicTable + description: A group containing multiple datasets that are aligned on the first + dimension (Currently, this requirement if left up to APIs to check and enforce). + These datasets represent different columns in the table. Apart from a column + that contains unique identifiers for each row, there are no other required datasets. + Users are free to add any number of custom VectorData objects (columns) here. + DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. See documentation for VectorData and VectorIndex + for more details. Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. + is_a: Container + attributes: + name: + name: name + range: string + required: true + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + required: true + multivalued: true + description: + name: description + description: Description of what is in this dynamic table. + range: text + required: true + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + array: + dimensions: + - alias: num_rows + range: int + required: true + multivalued: false + vector_data: + name: vector_data + description: Vector columns, including index columns, of this dynamic table. + range: VectorData + required: false + multivalued: true + tree_root: true + AlignedDynamicTable: + name: AlignedDynamicTable + description: DynamicTable container that supports storing a collection of sub-tables. + Each sub-table is a DynamicTable itself that is aligned with the main table + by row index. I.e., all DynamicTables stored in this group MUST have the same + number of rows. 
This type effectively defines a 2-level table in which the main + data is stored in the main table implemented by this type and additional columns + of the table are grouped into categories, with each category being represented + by a separate DynamicTable stored within the group. + is_a: DynamicTable + attributes: + value: + name: value + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: DynamicTable + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml new file mode 100644 index 0000000..b689554 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_7_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.7.0 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml index c7a8401..e3d3df3 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-experimental description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml index 0a31806..064f647 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.experimental.yaml @@ -9,7 +9,7 @@ annotations: id: hdmf-experimental.experimental version: 0.1.0 imports: -- ../../hdmf_common/v1_5_0/namespace +- ../../hdmf_common/v1_4_0/namespace - hdmf-experimental.nwb.language default_prefix: hdmf-experimental.experimental/ classes: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml index bab9ede..0a824ca 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-experimental description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml index 17a7d9d..89ffc2c 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml +++ 
b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml @@ -9,7 +9,7 @@ annotations: id: hdmf-experimental.resources version: 0.1.0 imports: -- ../../hdmf_common/v1_5_0/namespace +- ../../hdmf_common/v1_4_0/namespace - hdmf-experimental.nwb.language default_prefix: hdmf-experimental.resources/ classes: diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml new file mode 100644 index 0000000..94b3194 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.experimental.yaml @@ -0,0 +1,32 @@ +name: hdmf-experimental.experimental +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-experimental +id: hdmf-experimental.experimental +version: 0.2.0 +imports: +- ../../hdmf_common/v1_5_1/namespace +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental.experimental/ +classes: + EnumData: + name: EnumData + description: Data that come from a fixed set of values. A data value of i corresponds + to the i-th value in the VectorData referenced by the 'elements' attribute. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + elements: + name: elements + description: Reference to the VectorData object that contains the enumerable + elements + range: VectorData + required: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml new file mode 100644 index 0000000..0a824ca --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.nwb.language.yaml @@ -0,0 +1,94 @@ +name: hdmf-experimental.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-experimental +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + float64: + name: float64 + typeof: double + long: + name: long + typeof: integer + int64: + name: int64 + typeof: integer + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + int16: + name: int16 + typeof: integer + short: + name: short + typeof: integer + int8: + name: int8 + typeof: integer + uint: + name: uint + typeof: integer + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + minimum_value: 0 + numeric: + name: numeric + typeof: float + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git 
a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml new file mode 100644 index 0000000..c2fc8d8 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml @@ -0,0 +1,196 @@ +name: hdmf-experimental.resources +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-experimental +id: hdmf-experimental.resources +version: 0.2.0 +imports: +- ../../hdmf_common/v1_5_1/namespace +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental.resources/ +classes: + ExternalResources: + name: ExternalResources + description: 'A set of four tables for tracking external resource references in + a file. NOTE: this data type is in beta testing and is subject to change in + a later version.' + is_a: Container + attributes: + name: + name: name + range: string + required: true + keys: + name: keys + description: A table for storing user terms that are used to refer to external + resources. + range: ExternalResources__keys + required: true + multivalued: false + entities: + name: entities + description: A table for mapping user terms (i.e., keys) to resource entities. + range: ExternalResources__entities + required: true + multivalued: false + resources: + name: resources + description: A table for mapping user terms (i.e., keys) to resource entities. + range: ExternalResources__resources + required: true + multivalued: false + objects: + name: objects + description: A table for identifying which objects in a file contain references + to external resources. + range: ExternalResources__objects + required: true + multivalued: false + object_keys: + name: object_keys + description: A table for identifying which objects use which keys. + range: ExternalResources__object_keys + required: true + multivalued: false + tree_root: true + ExternalResources__keys: + name: ExternalResources__keys + description: A table for storing user terms that are used to refer to external + resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(keys) + range: string + required: true + equals_string: keys + key: + name: key + description: The user term that maps to one or more resources in the 'resources' + table. + range: text + required: true + multivalued: false + ExternalResources__entities: + name: ExternalResources__entities + description: A table for mapping user terms (i.e., keys) to resource entities. + is_a: Data + attributes: + name: + name: name + ifabsent: string(entities) + range: string + required: true + equals_string: entities + keys_idx: + name: keys_idx + description: The index to the key in the 'keys' table. + range: uint + required: true + multivalued: false + resources_idx: + name: resources_idx + description: The index into the 'resources' table + range: uint + required: true + multivalued: false + entity_id: + name: entity_id + description: The unique identifier entity. + range: text + required: true + multivalued: false + entity_uri: + name: entity_uri + description: The URI for the entity this reference applies to. This can be + an empty string. + range: text + required: true + multivalued: false + ExternalResources__resources: + name: ExternalResources__resources + description: A table for mapping user terms (i.e., keys) to resource entities. 
+ is_a: Data + attributes: + name: + name: name + ifabsent: string(resources) + range: string + required: true + equals_string: resources + resource: + name: resource + description: The name of the resource. + range: text + required: true + multivalued: false + resource_uri: + name: resource_uri + description: The URI for the resource. This can be an empty string. + range: text + required: true + multivalued: false + ExternalResources__objects: + name: ExternalResources__objects + description: A table for identifying which objects in a file contain references + to external resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(objects) + range: string + required: true + equals_string: objects + object_id: + name: object_id + description: The UUID for the object. + range: text + required: true + multivalued: false + relative_path: + name: relative_path + description: The relative path from the container with the object_id to the + dataset or attribute with the value(s) that is associated with an external + resource. This can be an empty string if the container is a dataset which + contains the value(s) that is associated with an external resource. + range: text + required: true + multivalued: false + field: + name: field + description: The field of the compound data type using an external resource. + This is used only if the dataset or attribute is a compound data type; otherwise + this should be an empty string. + range: text + required: true + multivalued: false + ExternalResources__object_keys: + name: ExternalResources__object_keys + description: A table for identifying which objects use which keys. + is_a: Data + attributes: + name: + name: name + ifabsent: string(object_keys) + range: string + required: true + equals_string: object_keys + objects_idx: + name: objects_idx + description: The index to the 'objects' table for the object that holds the + key. + range: uint + required: true + multivalued: false + keys_idx: + name: keys_idx + description: The index to the 'keys' table for the key. + range: uint + required: true + multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml new file mode 100644 index 0000000..6a311e0 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-experimental +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-experimental +description: Experimental data structures provided by HDMF. These are not guaranteed + to be available in the future. 
+id: hdmf-experimental +version: 0.2.0 +imports: +- hdmf-experimental.experimental +- hdmf-experimental.resources +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml new file mode 100644 index 0000000..4991b33 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.experimental.yaml @@ -0,0 +1,32 @@ +name: hdmf-experimental.experimental +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-experimental +id: hdmf-experimental.experimental +version: 0.3.0 +imports: +- ../../hdmf_common/v1_6_0/namespace +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental.experimental/ +classes: + EnumData: + name: EnumData + description: Data that come from a fixed set of values. A data value of i corresponds + to the i-th value in the VectorData referenced by the 'elements' attribute. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + elements: + name: elements + description: Reference to the VectorData object that contains the enumerable + elements + range: VectorData + required: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml new file mode 100644 index 0000000..0a824ca --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.nwb.language.yaml @@ -0,0 +1,94 @@ +name: hdmf-experimental.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-experimental +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + float64: + name: float64 + typeof: double + long: + name: long + typeof: integer + int64: + name: int64 + typeof: integer + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + int16: + name: int16 + typeof: integer + short: + name: short + typeof: integer + int8: + name: int8 + typeof: integer + uint: + name: uint + typeof: integer + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + minimum_value: 0 + numeric: + name: numeric + typeof: float + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml 
b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml new file mode 100644 index 0000000..350ef24 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml @@ -0,0 +1,199 @@ +name: hdmf-experimental.resources +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-experimental +id: hdmf-experimental.resources +version: 0.3.0 +imports: +- ../../hdmf_common/v1_6_0/namespace +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental.resources/ +classes: + ExternalResources: + name: ExternalResources + description: 'A set of five tables for tracking external resource references in + a file. NOTE: this data type is experimental and is subject to change in a later + version.' + is_a: Container + attributes: + name: + name: name + range: string + required: true + keys: + name: keys + description: A table for storing user terms that are used to refer to external + resources. + range: ExternalResources__keys + required: true + multivalued: false + files: + name: files + description: A table for storing object ids of files used in external resources. + range: ExternalResources__files + required: true + multivalued: false + entities: + name: entities + description: A table for mapping user terms (i.e., keys) to resource entities. + range: ExternalResources__entities + required: true + multivalued: false + objects: + name: objects + description: A table for identifying which objects in a file contain references + to external resources. + range: ExternalResources__objects + required: true + multivalued: false + object_keys: + name: object_keys + description: A table for identifying which objects use which keys. + range: ExternalResources__object_keys + required: true + multivalued: false + tree_root: true + ExternalResources__keys: + name: ExternalResources__keys + description: A table for storing user terms that are used to refer to external + resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(keys) + range: string + required: true + equals_string: keys + key: + name: key + description: The user term that maps to one or more resources in the `resources` + table, e.g., "human". + range: text + required: true + multivalued: false + ExternalResources__files: + name: ExternalResources__files + description: A table for storing object ids of files used in external resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(files) + range: string + required: true + equals_string: files + file_object_id: + name: file_object_id + description: The object id (UUID) of a file that contains objects that refers + to external resources. + range: text + required: true + multivalued: false + ExternalResources__entities: + name: ExternalResources__entities + description: A table for mapping user terms (i.e., keys) to resource entities. + is_a: Data + attributes: + name: + name: name + ifabsent: string(entities) + range: string + required: true + equals_string: entities + keys_idx: + name: keys_idx + description: The row index to the key in the `keys` table. + range: uint + required: true + multivalued: false + entity_id: + name: entity_id + description: The compact uniform resource identifier (CURIE) of the entity, + in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'. 
+ range: text + required: true + multivalued: false + entity_uri: + name: entity_uri + description: The URI for the entity this reference applies to. This can be + an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606 + range: text + required: true + multivalued: false + ExternalResources__objects: + name: ExternalResources__objects + description: A table for identifying which objects in a file contain references + to external resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(objects) + range: string + required: true + equals_string: objects + files_idx: + name: files_idx + description: The row index to the file in the `files` table containing the + object. + range: uint + required: true + multivalued: false + object_id: + name: object_id + description: The object id (UUID) of the object. + range: text + required: true + multivalued: false + object_type: + name: object_type + description: The data type of the object. + range: text + required: true + multivalued: false + relative_path: + name: relative_path + description: The relative path from the data object with the `object_id` to + the dataset or attribute with the value(s) that is associated with an external + resource. This can be an empty string if the object is a dataset that contains + the value(s) that is associated with an external resource. + range: text + required: true + multivalued: false + field: + name: field + description: The field within the compound data type using an external resource. + This is used only if the dataset or attribute is a compound data type; otherwise + this should be an empty string. + range: text + required: true + multivalued: false + ExternalResources__object_keys: + name: ExternalResources__object_keys + description: A table for identifying which objects use which keys. + is_a: Data + attributes: + name: + name: name + ifabsent: string(object_keys) + range: string + required: true + equals_string: object_keys + objects_idx: + name: objects_idx + description: The row index to the object in the `objects` table that holds + the key + range: uint + required: true + multivalued: false + keys_idx: + name: keys_idx + description: The row index to the key in the `keys` table. + range: uint + required: true + multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml new file mode 100644 index 0000000..fe62e64 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-experimental +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-experimental +description: Experimental data structures provided by HDMF. These are not guaranteed + to be available in the future. 
+id: hdmf-experimental +version: 0.3.0 +imports: +- hdmf-experimental.experimental +- hdmf-experimental.resources +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml new file mode 100644 index 0000000..6332939 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.experimental.yaml @@ -0,0 +1,32 @@ +name: hdmf-experimental.experimental +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-experimental +id: hdmf-experimental.experimental +version: 0.4.0 +imports: +- ../../hdmf_common/v1_7_0/namespace +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental.experimental/ +classes: + EnumData: + name: EnumData + description: Data that come from a fixed set of values. A data value of i corresponds + to the i-th value in the VectorData referenced by the 'elements' attribute. + is_a: VectorData + attributes: + name: + name: name + range: string + required: true + elements: + name: elements + description: Reference to the VectorData object that contains the enumerable + elements + range: VectorData + required: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml new file mode 100644 index 0000000..0a824ca --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.nwb.language.yaml @@ -0,0 +1,94 @@ +name: hdmf-experimental.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: hdmf-experimental +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + float64: + name: float64 + typeof: double + long: + name: long + typeof: integer + int64: + name: int64 + typeof: integer + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + int16: + name: int16 + typeof: integer + short: + name: short + typeof: integer + int8: + name: int8 + typeof: integer + uint: + name: uint + typeof: integer + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + minimum_value: 0 + numeric: + name: numeric + typeof: float + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml 
b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml new file mode 100644 index 0000000..8768e73 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml @@ -0,0 +1,222 @@ +name: hdmf-experimental.resources +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-experimental +id: hdmf-experimental.resources +version: 0.4.0 +imports: +- ../../hdmf_common/v1_7_0/namespace +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental.resources/ +classes: + ExternalResources: + name: ExternalResources + description: 'A set of five tables for tracking external resource references in + a file. NOTE: this data type is experimental and is subject to change in a later + version.' + is_a: Container + attributes: + name: + name: name + range: string + required: true + keys: + name: keys + description: A table for storing user terms that are used to refer to external + resources. + range: ExternalResources__keys + required: true + multivalued: false + files: + name: files + description: A table for storing object ids of files used in external resources. + range: ExternalResources__files + required: true + multivalued: false + entities: + name: entities + description: A table for mapping user terms (i.e., keys) to resource entities. + range: ExternalResources__entities + required: true + multivalued: false + objects: + name: objects + description: A table for identifying which objects in a file contain references + to external resources. + range: ExternalResources__objects + required: true + multivalued: false + object_keys: + name: object_keys + description: A table for identifying which objects use which keys. + range: ExternalResources__object_keys + required: true + multivalued: false + entity_keys: + name: entity_keys + description: A table for identifying which keys use which entity. + range: ExternalResources__entity_keys + required: true + multivalued: false + tree_root: true + ExternalResources__keys: + name: ExternalResources__keys + description: A table for storing user terms that are used to refer to external + resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(keys) + range: string + required: true + equals_string: keys + key: + name: key + description: The user term that maps to one or more resources in the `resources` + table, e.g., "human". + range: text + required: true + multivalued: false + ExternalResources__files: + name: ExternalResources__files + description: A table for storing object ids of files used in external resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(files) + range: string + required: true + equals_string: files + file_object_id: + name: file_object_id + description: The object id (UUID) of a file that contains objects that refers + to external resources. + range: text + required: true + multivalued: false + ExternalResources__entities: + name: ExternalResources__entities + description: A table for mapping user terms (i.e., keys) to resource entities. + is_a: Data + attributes: + name: + name: name + ifabsent: string(entities) + range: string + required: true + equals_string: entities + entity_id: + name: entity_id + description: The compact uniform resource identifier (CURIE) of the entity, + in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'. 
+ range: text + required: true + multivalued: false + entity_uri: + name: entity_uri + description: The URI for the entity this reference applies to. This can be + an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606 + range: text + required: true + multivalued: false + ExternalResources__objects: + name: ExternalResources__objects + description: A table for identifying which objects in a file contain references + to external resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(objects) + range: string + required: true + equals_string: objects + files_idx: + name: files_idx + description: The row index to the file in the `files` table containing the + object. + range: uint + required: true + multivalued: false + object_id: + name: object_id + description: The object id (UUID) of the object. + range: text + required: true + multivalued: false + object_type: + name: object_type + description: The data type of the object. + range: text + required: true + multivalued: false + relative_path: + name: relative_path + description: The relative path from the data object with the `object_id` to + the dataset or attribute with the value(s) that is associated with an external + resource. This can be an empty string if the object is a dataset that contains + the value(s) that is associated with an external resource. + range: text + required: true + multivalued: false + field: + name: field + description: The field within the compound data type using an external resource. + This is used only if the dataset or attribute is a compound data type; otherwise + this should be an empty string. + range: text + required: true + multivalued: false + ExternalResources__object_keys: + name: ExternalResources__object_keys + description: A table for identifying which objects use which keys. + is_a: Data + attributes: + name: + name: name + ifabsent: string(object_keys) + range: string + required: true + equals_string: object_keys + objects_idx: + name: objects_idx + description: The row index to the object in the `objects` table that holds + the key + range: uint + required: true + multivalued: false + keys_idx: + name: keys_idx + description: The row index to the key in the `keys` table. + range: uint + required: true + multivalued: false + ExternalResources__entity_keys: + name: ExternalResources__entity_keys + description: A table for identifying which keys use which entity. + is_a: Data + attributes: + name: + name: name + ifabsent: string(entity_keys) + range: string + required: true + equals_string: entity_keys + entities_idx: + name: entities_idx + description: The row index to the entity in the `entities` table. + range: uint + required: true + multivalued: false + keys_idx: + name: keys_idx + description: The row index to the key in the `keys` table. + range: uint + required: true + multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml new file mode 100644 index 0000000..a48814e --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-experimental +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-experimental +description: Experimental data structures provided by HDMF. These are not guaranteed + to be available in the future. 
+id: hdmf-experimental +version: 0.4.0 +imports: +- hdmf-experimental.experimental +- hdmf-experimental.resources +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml index bab9ede..0a824ca 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml @@ -5,7 +5,7 @@ annotations: value: 'False' namespace: tag: namespace - value: core + value: hdmf-experimental description: Adapter objects to mimic the behavior of elements in the nwb-schema-language id: nwb.language imports: From 83dd59d30ddc9a79ffdd32561e7a631012bdeaaf Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 5 Aug 2024 20:59:01 -0700 Subject: [PATCH 20/61] lint! --- docs/intro/translation.md | 2 +- nwb_linkml/src/nwb_linkml/adapters/adapter.py | 2 +- nwb_linkml/src/nwb_linkml/adapters/attribute.py | 9 ++++++--- nwb_linkml/src/nwb_linkml/adapters/dataset.py | 2 +- nwb_linkml/src/nwb_linkml/adapters/group.py | 2 +- nwb_linkml/src/nwb_linkml/maps/dtype.py | 1 + 6 files changed, 11 insertions(+), 7 deletions(-) diff --git a/docs/intro/translation.md b/docs/intro/translation.md index a7dec83..613b93f 100644 --- a/docs/intro/translation.md +++ b/docs/intro/translation.md @@ -308,7 +308,7 @@ There are several different ways to create references between objects in nwb/hdm - Implicitly, hdmf creates references between objects according to some naming conventions, eg. an attribute/dataset that is a `VectorIndex` named `mydata_index` will be linked to a `VectorData` object `mydata`. -- There is currrently a note in the schema language docs that there will be an additional +- There is currently a note in the schema language docs that there will be an additional [Relationships](https://schema-language.readthedocs.io/en/latest/description.html#relationships) system that explicitly models relationships, but it is unclear how that would be different than references. 
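The implicit `mydata_index` → `mydata` convention noted in that hunk is the same one the `DynamicTableMixin.resolve_targets` validator elsewhere in this series relies on. A minimal sketch of the idea, with `resolve_index_targets` as a hypothetical free function over an already-built mapping of column names to column objects (only the `target` attribute mirrors the real `VectorIndex` schema; the rest is illustrative):

```python
# Hypothetical sketch of hdmf's implicit index-naming convention:
# any column named "<name>_index" is taken to be a VectorIndex whose
# `target` is the column named "<name>".
def resolve_index_targets(columns: dict) -> None:
    for name, col in columns.items():
        idx = columns.get(f"{name}_index")
        if idx is not None and idx is not col:
            idx.target = col  # mirrors VectorIndex.target in the table schema
```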
diff --git a/nwb_linkml/src/nwb_linkml/adapters/adapter.py b/nwb_linkml/src/nwb_linkml/adapters/adapter.py index 6823bb0..72f4248 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/adapter.py +++ b/nwb_linkml/src/nwb_linkml/adapters/adapter.py @@ -26,7 +26,7 @@ from linkml_runtime.linkml_model import ( ) from pydantic import BaseModel -from nwb_schema_language import Attribute, Dataset, Group, Schema, CompoundDtype +from nwb_schema_language import Attribute, CompoundDtype, Dataset, Group, Schema if sys.version_info.minor >= 11: from typing import TypeVarTuple, Unpack diff --git a/nwb_linkml/src/nwb_linkml/adapters/attribute.py b/nwb_linkml/src/nwb_linkml/adapters/attribute.py index c7ebd19..ddf6edb 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/attribute.py +++ b/nwb_linkml/src/nwb_linkml/adapters/attribute.py @@ -3,12 +3,12 @@ Adapters for attribute types """ from abc import abstractmethod -from typing import ClassVar, Optional, TypedDict, Type +from typing import ClassVar, Optional, Type, TypedDict from linkml_runtime.linkml_model.meta import SlotDefinition +from nwb_linkml.adapters.adapter import Adapter, BuildResult, is_1d from nwb_linkml.adapters.array import ArrayAdapter -from nwb_linkml.adapters.adapter import BuildResult, is_1d, Adapter from nwb_linkml.maps import Map from nwb_linkml.maps.dtype import handle_dtype from nwb_schema_language import Attribute @@ -24,16 +24,19 @@ def _make_ifabsent(val: str | int | float | None) -> str | None: elif isinstance(val, float): return f"float({val})" else: - return str(value) + return str(val) class AttrDefaults(TypedDict): + """Default fields for an attribute""" + equals_string: str | None equals_number: float | int | None ifabsent: str | None class AttributeMap(Map): + """Base class for attribute mapping transformations :)""" @classmethod def handle_defaults(cls, attr: Attribute) -> AttrDefaults: diff --git a/nwb_linkml/src/nwb_linkml/adapters/dataset.py b/nwb_linkml/src/nwb_linkml/adapters/dataset.py index bcbd109..824fe84 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/dataset.py +++ b/nwb_linkml/src/nwb_linkml/adapters/dataset.py @@ -7,7 +7,7 @@ from typing import ClassVar, Optional, Type from linkml_runtime.linkml_model.meta import ArrayExpression, SlotDefinition -from nwb_linkml.adapters.adapter import BuildResult, is_1d, is_compound, has_attrs +from nwb_linkml.adapters.adapter import BuildResult, has_attrs, is_1d, is_compound from nwb_linkml.adapters.array import ArrayAdapter from nwb_linkml.adapters.classes import ClassAdapter from nwb_linkml.maps import QUANTITY_MAP, Map diff --git a/nwb_linkml/src/nwb_linkml/adapters/group.py b/nwb_linkml/src/nwb_linkml/adapters/group.py index 451cb4c..13a03b7 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/group.py +++ b/nwb_linkml/src/nwb_linkml/adapters/group.py @@ -2,7 +2,7 @@ Adapter for NWB groups to linkml Classes """ -from typing import Type, List +from typing import List, Type from linkml_runtime.linkml_model import SlotDefinition diff --git a/nwb_linkml/src/nwb_linkml/maps/dtype.py b/nwb_linkml/src/nwb_linkml/maps/dtype.py index 0cc7c79..26d25a6 100644 --- a/nwb_linkml/src/nwb_linkml/maps/dtype.py +++ b/nwb_linkml/src/nwb_linkml/maps/dtype.py @@ -7,6 +7,7 @@ from typing import Any, Type import nptyping import numpy as np + from nwb_schema_language import CompoundDtype, DTypeType, FlatDtype, ReferenceDtype flat_to_linkml = { From 6431afa7e5352824e5bf0b5c0bfebea2817ee5f9 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 5 Aug 2024 21:11:12 -0700 Subject: [PATCH 21/61] update tests 
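The updated tests below pin down the behavior of `handle_dtype` after its move from an adapter classmethod to a module-level function in `nwb_linkml.maps.dtype`. A rough usage sketch, assembled from the assertions in the diff (the `ReferenceDtype` constructor arguments are assumptions, not copied from `nwb_schema_language`):

```python
from nwb_linkml.maps.dtype import handle_dtype
from nwb_schema_language import ReferenceDtype

# reference dtypes resolve to the name of their target class
ref = ReferenceDtype(target_type="TargetClass", reftype="object")  # args assumed
assert handle_dtype(ref) == "TargetClass"

# absent or empty dtypes fall back to the catch-all AnyType
assert handle_dtype(None) == "AnyType"
assert handle_dtype([]) == "AnyType"

# flat dtype strings pass through unchanged
assert handle_dtype("int32") == "int32"
```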
--- nwb_linkml/src/nwb_linkml/adapters/dataset.py | 18 ++++++++++++++---- .../test_adapters/test_adapter_classes.py | 13 +++++++------ 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/adapters/dataset.py b/nwb_linkml/src/nwb_linkml/adapters/dataset.py index 824fe84..3a49798 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/dataset.py +++ b/nwb_linkml/src/nwb_linkml/adapters/dataset.py @@ -154,10 +154,14 @@ class MapScalarAttributes(DatasetMap): name: rate description: Sampling rate, in Hz. range: float32 + required: true unit: name: unit description: Unit of measurement for time, which is fixed to 'seconds'. + ifabsent: string(seconds) range: text + required: true + equals_string: seconds value: name: value range: float64 @@ -244,11 +248,15 @@ class MapListlike(DatasetMap): range: string required: true value: - name: image + name: value + annotations: + source_type: + tag: source_type + value: reference description: Ordered dataset of references to Image objects. - multivalued: true range: Image required: true + multivalued: true tree_root: true """ @@ -476,12 +484,14 @@ class MapArrayLikeAttributes(DatasetMap): name: resolution description: Pixel resolution of the image, in pixels per centimeter. range: float32 + required: false description: name: description description: Description of the image. range: text - array: - name: array + required: false + value: + name: value range: numeric any_of: - array: diff --git a/nwb_linkml/tests/test_adapters/test_adapter_classes.py b/nwb_linkml/tests/test_adapters/test_adapter_classes.py index 464e55f..126869e 100644 --- a/nwb_linkml/tests/test_adapters/test_adapter_classes.py +++ b/nwb_linkml/tests/test_adapters/test_adapter_classes.py @@ -3,6 +3,7 @@ from linkml_runtime.linkml_model import SlotDefinition from nwb_linkml.adapters import DatasetAdapter, GroupAdapter from nwb_schema_language import CompoundDtype, Dataset, Group, ReferenceDtype +from nwb_linkml.maps.dtype import handle_dtype @pytest.mark.xfail() @@ -89,7 +90,7 @@ def test_get_full_name(): parent.cls.neurodata_type_def = None parent.cls.name = "ParentName" parent.parent = grandparent - assert adapter._get_full_name() == "Grandparent__ParentName__ChildName" + assert adapter._get_full_name() == "ParentName__ChildName" # if it has none, raise value error adapter.cls.name = None @@ -179,9 +180,9 @@ def test_handle_dtype(nwb_schema): CompoundDtype(name="reference", doc="reference!", dtype=reftype), ] - assert cls.handle_dtype(reftype) == "TargetClass" - assert cls.handle_dtype(None) == "AnyType" - assert cls.handle_dtype([]) == "AnyType" + assert handle_dtype(reftype) == "TargetClass" + assert handle_dtype(None) == "AnyType" + assert handle_dtype([]) == "AnyType" # handling compound types is currently TODO - assert cls.handle_dtype(compoundtype) == "AnyType" - assert cls.handle_dtype("int32") == "int32" + assert handle_dtype(compoundtype) == "AnyType" + assert handle_dtype("int32") == "int32" From c2e663eca0279f5499e368e5902db5061c4defa7 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 5 Aug 2024 21:12:34 -0700 Subject: [PATCH 22/61] fuckin LINT --- nwb_linkml/tests/test_adapters/test_adapter_classes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nwb_linkml/tests/test_adapters/test_adapter_classes.py b/nwb_linkml/tests/test_adapters/test_adapter_classes.py index 126869e..ee6e7f6 100644 --- a/nwb_linkml/tests/test_adapters/test_adapter_classes.py +++ b/nwb_linkml/tests/test_adapters/test_adapter_classes.py @@ -2,8 +2,8 @@ 
import pytest from linkml_runtime.linkml_model import SlotDefinition from nwb_linkml.adapters import DatasetAdapter, GroupAdapter -from nwb_schema_language import CompoundDtype, Dataset, Group, ReferenceDtype from nwb_linkml.maps.dtype import handle_dtype +from nwb_schema_language import CompoundDtype, Dataset, Group, ReferenceDtype @pytest.mark.xfail() From fbb06dac52bf60d9958ec82911f8cbeec0761f3e Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Tue, 6 Aug 2024 01:46:20 -0700 Subject: [PATCH 23/61] use coveralls action --- .github/workflows/tests.yml | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 12ab684..c14e806 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -37,8 +37,20 @@ jobs: run: pytest working-directory: nwb_linkml - - name: Report coverage - working-directory: nwb_linkml - run: "coveralls --service=github" - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Coveralls Parallel + uses: coverallsapp/github-action@v2.3.0 + if: runner.os != 'macOS' + with: + flag-name: run-${{ join(matrix.*, '-') }} + parallel: true + debug: true + + finish-coverage: + needs: test + if: ${{ always() }} + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: coverallsapp/github-action@v2.3.0 + with: + parallel-finished: true \ No newline at end of file From a11d3d042e44491474b1eb1e9438dd121fd17f9a Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Tue, 6 Aug 2024 19:44:04 -0700 Subject: [PATCH 24/61] working ragged array indexing before rebuilding models --- docs/meta/todo.md | 1 + nwb_linkml/src/nwb_linkml/includes/hdmf.py | 147 ++++++++++++++++- nwb_linkml/src/nwb_linkml/includes/types.py | 2 +- .../hdmf_common/v1_8_0/hdmf_common_table.py | 152 ++++++++++++++++-- nwb_linkml/tests/test_includes/test_hdmf.py | 53 ++++++ 5 files changed, 332 insertions(+), 23 deletions(-) diff --git a/docs/meta/todo.md b/docs/meta/todo.md index 6508c62..3d3efcb 100644 --- a/docs/meta/todo.md +++ b/docs/meta/todo.md @@ -7,6 +7,7 @@ NWB schema translation - handle compound `dtype` like in ophys.PlaneSegmentation.pixel_mask - handle compound `dtype` like in TimeSeriesReferenceVectorData - Create a validator that checks if all the lists in a compound dtype dataset are same length +- [ ] Make `target` optional in vectorIndex Cleanup - [ ] Update pydantic generator diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index e4534de..c34f757 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -5,9 +5,19 @@ Special types for mimicking HDMF special case behavior from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Tuple, Union, overload from linkml.generators.pydanticgen.template import Import, Imports, ObjectImport -from numpydantic import NDArray +from numpydantic import NDArray, Shape from pandas import DataFrame, Series -from pydantic import BaseModel, ConfigDict, Field, model_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + model_validator, + field_validator, + ValidatorFunctionWrapHandler, + ValidationError, + ValidationInfo, +) +import numpy as np if TYPE_CHECKING: from nwb_linkml.models import VectorData, VectorIndex @@ -31,6 +41,7 @@ class DynamicTableMixin(BaseModel): # overridden by subclass but implemented here for testing and typechecking purposes :) colnames: List[str] = Field(default_factory=list) + id: 
Optional[NDArray[Shape["* num_rows"], int]] = None @property def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: @@ -143,7 +154,28 @@ class DynamicTableMixin(BaseModel): @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]) -> None: + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_COLUMN_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]) -> Dict: """ Construct colnames from arguments. @@ -167,6 +199,12 @@ class DynamicTableMixin(BaseModel): model["colnames"].extend(colnames) return model + @model_validator(mode="before") + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + If an id column is not given, create one as an arange. + """ + @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": """ @@ -189,6 +227,38 @@ class DynamicTableMixin(BaseModel): idx.target = col return self + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._columns.values()] + assert [l == lengths[0] for l in lengths], ( + "Columns are not of equal length! " + f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" + ) + return self + + @field_validator("*", mode="wrap") + @classmethod + def cast_columns(cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo): + """ + If columns are supplied as arrays, try casting them to the type before validating + """ + try: + return handler(val) + except ValidationError: + annotation = cls.model_fields[info.field_name].annotation + if type(annotation).__name__ == "_UnionGenericAlias": + annotation = annotation.__args__[0] + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) + ) + class VectorDataMixin(BaseModel): """ @@ -200,6 +270,11 @@ class VectorDataMixin(BaseModel): # redefined in `VectorData`, but included here for testing and type checking value: Optional[NDArray] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing @@ -214,6 +289,27 @@ class VectorDataMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e + + def __len__(self) -> int: + """ + Use index as length, if present + """ + if self._index: + return len(self._index) + else: + return len(self.value) + class VectorIndexMixin(BaseModel): """ @@ -224,6 +320,11 @@ class VectorIndexMixin(BaseModel): value: Optional[NDArray] = None target: Optional["VectorData"] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if 
value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` @@ -231,19 +332,19 @@ class VectorIndexMixin(BaseModel): start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.array[slice(start, end)] + return [self.target.value[slice(start, end)]] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: return self.value[item] - elif type(self.target).__name__ == "VectorData": + elif isinstance(self.target, VectorData): if isinstance(item, int): return self._getitem_helper(item) else: idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: - raise NotImplementedError("DynamicTableRange not supported yet") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -252,6 +353,24 @@ class VectorIndexMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e + + def __len__(self) -> int: + """ + Get length from value + """ + return len(self.value) + DYNAMIC_TABLE_IMPORTS = Imports( imports=[ @@ -266,8 +385,20 @@ DYNAMIC_TABLE_IMPORTS = Imports( ObjectImport(name="Tuple"), ], ), - Import(module="numpydantic", objects=[ObjectImport(name="NDArray")]), - Import(module="pydantic", objects=[ObjectImport(name="model_validator")]), + Import( + module="numpydantic", objects=[ObjectImport(name="NDArray"), ObjectImport(name="Shape")] + ), + Import( + module="pydantic", + objects=[ + ObjectImport(name="model_validator"), + ObjectImport(name="field_validator"), + ObjectImport(name="ValidationInfo"), + ObjectImport(name="ValidatorFunctionWrapHandler"), + ObjectImport(name="ValidationError"), + ], + ), + Import(module="numpy", alias="np"), ] ) """ diff --git a/nwb_linkml/src/nwb_linkml/includes/types.py b/nwb_linkml/src/nwb_linkml/includes/types.py index 049aa65..2604eb5 100644 --- a/nwb_linkml/src/nwb_linkml/includes/types.py +++ b/nwb_linkml/src/nwb_linkml/includes/types.py @@ -19,7 +19,7 @@ ModelTypeString = """ModelType = TypeVar("ModelType", bound=Type[BaseModel])""" def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]: """Get the name of the slot that refers to this object""" - assert isinstance(item, (BaseModel, dict)) + assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!" 
name = info.field_name if isinstance(item, BaseModel): item.name = name diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index ef6ba01..7dbb253 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -1,15 +1,22 @@ from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -import numpy as np -from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container + + +from ...hdmf_common.v1_8_0.hdmf_common_base import Data from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from typing import Any, ClassVar, List, Dict, Optional, Union, overload, Tuple +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + model_validator, + field_validator, + ValidationInfo, + ValidatorFunctionWrapHandler, + ValidationError, +) from numpydantic import NDArray, Shape +import numpy as np metamodel_version = "None" version = "1.8.0" @@ -60,6 +67,11 @@ class VectorDataMixin(BaseModel): # redefined in `VectorData`, but included here for testing and type checking value: Optional[NDArray] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing @@ -74,6 +86,27 @@ class VectorDataMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e + + def __len__(self) -> int: + """ + Use index as length, if present + """ + if self._index: + return len(self._index) + else: + return len(self.value) + class VectorIndexMixin(BaseModel): """ @@ -84,6 +117,11 @@ class VectorIndexMixin(BaseModel): value: Optional[NDArray] = None target: Optional["VectorData"] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` @@ -91,12 +129,12 @@ class VectorIndexMixin(BaseModel): start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.array[slice(start, end)] + return self.target.value[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: return self.value[item] - elif type(self.target).__name__ == "VectorData": + elif isinstance(self.target, VectorData): if isinstance(item, int): return self._getitem_helper(item) else: @@ -112,6 +150,24 @@ class VectorIndexMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except 
AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e + + def __len__(self) -> int: + """ + Get length from value + """ + return len(self.value) + class DynamicTableMixin(BaseModel): """ @@ -131,6 +187,7 @@ class DynamicTableMixin(BaseModel): # overridden by subclass but implemented here for testing and typechecking purposes :) colnames: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None @property def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: @@ -222,6 +279,10 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) + elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + # special case where we are returning a row in a ragged array, + # same as above - prevent pandas pivoting to long + val = Series([val]) data[k] = val return data @@ -241,9 +302,40 @@ class DynamicTableMixin(BaseModel): return super().__setattr__(key, value) + def __getattr__(self, item): + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:, :], item) + except AttributeError: + raise e + @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]) -> None: + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_COLUMN_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]) -> Dict: """ Construct colnames from arguments. @@ -289,6 +381,38 @@ class DynamicTableMixin(BaseModel): idx.target = col return self + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._columns.values()] + assert [l == lengths[0] for l in lengths], ( + "Columns are not of equal length! " + f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" + ) + return self + + @field_validator("*", mode="wrap") + @classmethod + def cast_columns(cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo): + """ + If columns are supplied as arrays, try casting them to the type before validating + """ + try: + return handler(val) + except ValidationError: + annotation = cls.model_fields[info.field_name].annotation + if type(annotation).__name__ == "_UnionGenericAlias": + annotation = annotation.__args__[0] + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) + ) + linkml_meta = LinkMLMeta( { @@ -335,8 +459,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) 
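
The `__getattr__` overrides above all follow the same delegation pattern. A toy sketch of the idea (a plain class and numpy, not the patch's mixins), where unknown attributes fall through to the wrapped `value`:

```python
import numpy as np

class ValueProxy:
    """Illustrative stand-in for the mixins' attr forwarding."""

    def __init__(self, value):
        self.value = value

    def __getattr__(self, item):
        # only invoked when normal attribute lookup fails,
        # mirroring the mixins' fallback to ``getattr(self.value, item)``
        return getattr(self.value, item)

v = ValueProxy(np.arange(6))
assert v.shape == (6,)  # resolved on the wrapped ndarray
assert v.sum() == 15    # bound methods forward too
```
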
-    target: VectorData = Field(
-        ..., description="""Reference to the target dataset that this index applies to."""
+    target: Optional[VectorData] = Field(
+        None, description="""Reference to the target dataset that this index applies to."""
     )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py
index 2a3b1d0..557e7db 100644
--- a/nwb_linkml/tests/test_includes/test_hdmf.py
+++ b/nwb_linkml/tests/test_includes/test_hdmf.py
@@ -10,6 +10,7 @@ from nwb_linkml.models.pydantic.core.v2_7_0.namespace import (
     ElectricalSeries,
     ElectrodeGroup,
     ExtracellularEphysElectrodes,
+    Units,
 )
@@ -56,6 +57,40 @@ def electrical_series() -> Tuple["ElectricalSeries", "ExtracellularEphysElectrod
     return electrical_series, electrodes
+@pytest.fixture(params=[True, False])
+def units(request) -> Tuple[Units, list[np.ndarray], np.ndarray]:
+    """
+    Test case for units
+
+    Parameterized by extra_column because pandas likes to pivot dataframes
+    to long when there is only one column and it's not len() == 1
+    """
+
+    n_units = 24
+    spike_times = [
+        np.full(shape=np.random.randint(10, 50), fill_value=i, dtype=float) for i in range(n_units)
+    ]
+    spike_idx = []
+    for i in range(n_units):
+        if i == 0:
+            spike_idx.append(len(spike_times[0]))
+        else:
+            spike_idx.append(len(spike_times[i]) + spike_idx[i - 1])
+    spike_idx = np.array(spike_idx)
+
+    spike_times_flat = np.concatenate(spike_times)
+
+    kwargs = {
+        "description": "units!!!!",
+        "spike_times": spike_times_flat,
+        "spike_times_index": spike_idx,
+    }
+    if request.param:
+        kwargs["extra_column"] = ["hey!"] * n_units
+    units = Units(**kwargs)
+    return units, spike_times, spike_idx
+
+
 def test_dynamictable_indexing(electrical_series):
     """
     Can index values from a dynamictable
@@ -106,6 +141,24 @@ def test_dynamictable_indexing(electrical_series):
     assert subsection.dtypes.values.tolist() == dtypes[0:3]
+def test_dynamictable_ragged_arrays(units):
+    """
+    Should be able to index ragged arrays using an implicit _index column
+
+    Also tests:
+    - passing arrays directly instead of wrapping in vectordata/index specifically,
+      if the models in the fixture instantiate then this works
+    """
+    units, spike_times, spike_idx = units
+
+    # ensure we don't pivot to long when indexing
+    assert units[0].shape[0] == 1
+    # check that we got the indexing boundaries correct
+    # (and that we are forwarding attr calls to the dataframe by accessing shape)
+    for i in range(units.shape[0]):
+        assert np.all(units.iloc[i, 0] == spike_times[i])
+
+
 def test_dynamictable_append_column():
     pass

From a309c25c3d991e09d01d588912f95f99ec8e4076 Mon Sep 17 00:00:00 2001
From: sneakers-the-rat
Date: Tue, 6 Aug 2024 19:57:45 -0700
Subject: [PATCH 25/61] regenerate model, lint

---
 docs/meta/todo.md | 2 +-
 .../src/nwb_linkml/generators/pydantic.py | 8 +
 nwb_linkml/src/nwb_linkml/includes/hdmf.py | 40 +++--
 .../hdmf_common/v1_1_0/hdmf_common_table.py | 149 ++++++++++++++++--
 .../hdmf_common/v1_1_2/hdmf_common_table.py | 149 ++++++++++++++++--
 .../hdmf_common/v1_1_3/hdmf_common_table.py | 149 ++++++++++++++++--
 .../hdmf_common/v1_2_0/hdmf_common_table.py | 145 ++++++++++++++++-
 .../hdmf_common/v1_2_1/hdmf_common_table.py | 145 ++++++++++++++++-
 .../hdmf_common/v1_3_0/hdmf_common_table.py | 145 ++++++++++++++++-
 .../hdmf_common/v1_4_0/hdmf_common_table.py | 145 ++++++++++++++++-
 .../hdmf_common/v1_5_0/hdmf_common_table.py | 145 ++++++++++++++++-
.../hdmf_common/v1_5_1/hdmf_common_table.py | 145 ++++++++++++++++- .../hdmf_common/v1_6_0/hdmf_common_table.py | 145 ++++++++++++++++- .../hdmf_common/v1_7_0/hdmf_common_table.py | 145 ++++++++++++++++- .../hdmf_common/v1_8_0/hdmf_common_table.py | 31 ++-- pyproject.toml | 2 + 16 files changed, 1570 insertions(+), 120 deletions(-) diff --git a/docs/meta/todo.md b/docs/meta/todo.md index 3d3efcb..92355cf 100644 --- a/docs/meta/todo.md +++ b/docs/meta/todo.md @@ -7,7 +7,7 @@ NWB schema translation - handle compound `dtype` like in ophys.PlaneSegmentation.pixel_mask - handle compound `dtype` like in TimeSeriesReferenceVectorData - Create a validator that checks if all the lists in a compound dtype dataset are same length -- [ ] Make `target` optional in vectorIndex +- [ ] Move making `target` optional in vectorIndex from pydantic generator to linkml generators! Cleanup - [ ] Update pydantic generator diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py index d3d8395..f8c8033 100644 --- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py +++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py @@ -87,6 +87,14 @@ class NWBPydanticGenerator(PydanticGenerator): if not base_range_subsumes_any_of: raise ValueError("Slot cannot have both range and any_of defined") + def before_generate_slot(self, slot: SlotDefinition, sv: SchemaView) -> SlotDefinition: + """ + Force some properties to be optional + """ + if slot.name == "target" and "index" in slot.description: + slot.required = False + return slot + def after_generate_slot(self, slot: SlotResult, sv: SchemaView) -> SlotResult: """ - strip unwanted metadata diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index c34f757..faf9107 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -4,6 +4,7 @@ Special types for mimicking HDMF special case behavior from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Tuple, Union, overload +import numpy as np from linkml.generators.pydanticgen.template import Import, Imports, ObjectImport from numpydantic import NDArray, Shape from pandas import DataFrame, Series @@ -11,13 +12,12 @@ from pydantic import ( BaseModel, ConfigDict, Field, - model_validator, - field_validator, - ValidatorFunctionWrapHandler, ValidationError, ValidationInfo, + ValidatorFunctionWrapHandler, + field_validator, + model_validator, ) -import numpy as np if TYPE_CHECKING: from nwb_linkml.models import VectorData, VectorIndex @@ -133,6 +133,10 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) + elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + # special case where we are returning a row in a ragged array, + # same as above - prevent pandas pivoting to long + val = Series([val]) data[k] = val return data @@ -152,6 +156,16 @@ class DynamicTableMixin(BaseModel): return super().__setattr__(key, value) + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:, :], item) + except AttributeError: + raise e from None + @model_validator(mode="before") @classmethod def create_id(cls, model: Dict[str, Any]) -> Dict: @@ -199,12 +213,6 @@ class DynamicTableMixin(BaseModel): 
model["colnames"].extend(colnames) return model - @model_validator(mode="before") - def create_id(cls, model: Dict[str, Any]) -> Dict: - """ - If an id column is not given, create one as an arange. - """ - @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": """ @@ -233,7 +241,7 @@ class DynamicTableMixin(BaseModel): Ensure that all columns are equal length """ lengths = [len(v) for v in self._columns.values()] - assert [l == lengths[0] for l in lengths], ( + assert [length == lengths[0] for length in lengths], ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -241,7 +249,9 @@ class DynamicTableMixin(BaseModel): @field_validator("*", mode="wrap") @classmethod - def cast_columns(cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo): + def cast_columns( + cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo + ) -> Any: """ If columns are supplied as arrays, try casting them to the type before validating """ @@ -299,7 +309,7 @@ class VectorDataMixin(BaseModel): try: return getattr(self.value, item) except AttributeError: - raise e + raise e from None def __len__(self) -> int: """ @@ -332,7 +342,7 @@ class VectorIndexMixin(BaseModel): start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return [self.target.value[slice(start, end)]] + return self.target.value[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: @@ -363,7 +373,7 @@ class VectorIndexMixin(BaseModel): try: return getattr(self.value, item) except AttributeError: - raise e + raise e from None def __len__(self) -> int: """ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index 874bfe5..ab05d9d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -4,11 +4,21 @@ from decimal import Decimal from enum import Enum import re import sys -import numpy as np from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + field_validator, + model_validator, + ValidationInfo, + ValidatorFunctionWrapHandler, + ValidationError, +) +import numpy as np metamodel_version = "None" version = "1.1.0" @@ -59,6 +69,11 @@ class VectorDataMixin(BaseModel): # redefined in `VectorData`, but included here for testing and type checking value: Optional[NDArray] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing @@ -73,6 +88,27 @@ class VectorDataMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + 
def __len__(self) -> int: + """ + Use index as length, if present + """ + if self._index: + return len(self._index) + else: + return len(self.value) + class VectorIndexMixin(BaseModel): """ @@ -83,6 +119,11 @@ class VectorIndexMixin(BaseModel): value: Optional[NDArray] = None target: Optional["VectorData"] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` @@ -90,19 +131,19 @@ class VectorIndexMixin(BaseModel): start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.array[slice(start, end)] + return self.target.value[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: return self.value[item] - elif type(self.target).__name__ == "VectorData": + elif isinstance(self.target, VectorData): if isinstance(item, int): return self._getitem_helper(item) else: idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: - raise NotImplementedError("DynamicTableRange not supported yet") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -111,6 +152,24 @@ class VectorIndexMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Get length from value + """ + return len(self.value) + class DynamicTableMixin(BaseModel): """ @@ -130,6 +189,7 @@ class DynamicTableMixin(BaseModel): # overridden by subclass but implemented here for testing and typechecking purposes :) colnames: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None @property def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: @@ -221,6 +281,10 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) + elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + # special case where we are returning a row in a ragged array, + # same as above - prevent pandas pivoting to long + val = Series([val]) data[k] = val return data @@ -240,9 +304,40 @@ class DynamicTableMixin(BaseModel): return super().__setattr__(key, value) + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:, :], item) + except AttributeError: + raise e from None + @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]) -> None: + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_COLUMN_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + + 
@model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]) -> Dict: """ Construct colnames from arguments. @@ -288,6 +383,40 @@ class DynamicTableMixin(BaseModel): idx.target = col return self + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._columns.values()] + assert [length == lengths[0] for length in lengths], ( + "Columns are not of equal length! " + f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" + ) + return self + + @field_validator("*", mode="wrap") + @classmethod + def cast_columns( + cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo + ) -> Any: + """ + If columns are supplied as arrays, try casting them to the type before validating + """ + try: + return handler(val) + except ValidationError: + annotation = cls.model_fields[info.field_name].annotation + if type(annotation).__name__ == "_UnionGenericAlias": + annotation = annotation.__args__[0] + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) + ) + linkml_meta = LinkMLMeta( { @@ -325,7 +454,9 @@ class Index(Data): ) name: str = Field(...) - target: Data = Field(..., description="""Target dataset that this index applies to.""") + target: Optional[Data] = Field( + None, description="""Target dataset that this index applies to.""" + ) class VectorData(VectorDataMixin): @@ -351,8 +482,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) - target: VectorData = Field( - ..., description="""Reference to the target dataset that this index applies to.""" + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 2818a13..7da6b05 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -4,11 +4,21 @@ from decimal import Decimal from enum import Enum import re import sys -import numpy as np from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + field_validator, + model_validator, + ValidationInfo, + ValidatorFunctionWrapHandler, + ValidationError, +) +import numpy as np metamodel_version = "None" version = "1.1.2" @@ -59,6 +69,11 @@ class VectorDataMixin(BaseModel): # redefined in `VectorData`, but included here for testing and type checking value: Optional[NDArray] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing @@ -73,6 +88,27 @@ class VectorDataMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + 
try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use index as length, if present + """ + if self._index: + return len(self._index) + else: + return len(self.value) + class VectorIndexMixin(BaseModel): """ @@ -83,6 +119,11 @@ class VectorIndexMixin(BaseModel): value: Optional[NDArray] = None target: Optional["VectorData"] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` @@ -90,19 +131,19 @@ class VectorIndexMixin(BaseModel): start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.array[slice(start, end)] + return self.target.value[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: return self.value[item] - elif type(self.target).__name__ == "VectorData": + elif isinstance(self.target, VectorData): if isinstance(item, int): return self._getitem_helper(item) else: idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: - raise NotImplementedError("DynamicTableRange not supported yet") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -111,6 +152,24 @@ class VectorIndexMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Get length from value + """ + return len(self.value) + class DynamicTableMixin(BaseModel): """ @@ -130,6 +189,7 @@ class DynamicTableMixin(BaseModel): # overridden by subclass but implemented here for testing and typechecking purposes :) colnames: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None @property def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: @@ -221,6 +281,10 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) + elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + # special case where we are returning a row in a ragged array, + # same as above - prevent pandas pivoting to long + val = Series([val]) data[k] = val return data @@ -240,9 +304,40 @@ class DynamicTableMixin(BaseModel): return super().__setattr__(key, value) + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:, :], item) + except AttributeError: + raise e from None + @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]) -> None: + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, 
VectorData) and val._index) + or key in cls.NON_COLUMN_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]) -> Dict: """ Construct colnames from arguments. @@ -288,6 +383,40 @@ class DynamicTableMixin(BaseModel): idx.target = col return self + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._columns.values()] + assert [length == lengths[0] for length in lengths], ( + "Columns are not of equal length! " + f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" + ) + return self + + @field_validator("*", mode="wrap") + @classmethod + def cast_columns( + cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo + ) -> Any: + """ + If columns are supplied as arrays, try casting them to the type before validating + """ + try: + return handler(val) + except ValidationError: + annotation = cls.model_fields[info.field_name].annotation + if type(annotation).__name__ == "_UnionGenericAlias": + annotation = annotation.__args__[0] + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) + ) + linkml_meta = LinkMLMeta( { @@ -325,7 +454,9 @@ class Index(Data): ) name: str = Field(...) - target: Data = Field(..., description="""Target dataset that this index applies to.""") + target: Optional[Data] = Field( + None, description="""Target dataset that this index applies to.""" + ) class VectorData(VectorDataMixin): @@ -351,8 +482,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) 
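
A hypothetical standalone rendering of what `create_id` does when no `id` column is supplied (the function name and `non_columns` tuple are invented for the sketch; the generated validator additionally skips columns flagged by `val._index`):

```python
import numpy as np

def default_id(model: dict, non_columns=("name", "colnames", "description")) -> dict:
    # build an arange id from the longest plain column when none is supplied
    if "id" not in model:
        lengths = [
            len(val)
            for key, val in model.items()
            # skip non-column fields and data columns that have an index
            if key not in non_columns and f"{key}_index" not in model
        ]
        model["id"] = np.arange(np.max(lengths))
    return model

table = {
    "description": "ragged",
    "spike_times": np.arange(6.0),             # 6 flat values...
    "spike_times_index": np.array([2, 3, 6]),  # ...making 3 rows
}
assert default_id(table)["id"].tolist() == [0, 1, 2]
```
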
- target: VectorData = Field( - ..., description="""Reference to the target dataset that this index applies to.""" + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index 12f24d7..a1ba281 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -4,11 +4,21 @@ from decimal import Decimal from enum import Enum import re import sys -import numpy as np from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + field_validator, + model_validator, + ValidationInfo, + ValidatorFunctionWrapHandler, + ValidationError, +) +import numpy as np metamodel_version = "None" version = "1.1.3" @@ -59,6 +69,11 @@ class VectorDataMixin(BaseModel): # redefined in `VectorData`, but included here for testing and type checking value: Optional[NDArray] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing @@ -73,6 +88,27 @@ class VectorDataMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use index as length, if present + """ + if self._index: + return len(self._index) + else: + return len(self.value) + class VectorIndexMixin(BaseModel): """ @@ -83,6 +119,11 @@ class VectorIndexMixin(BaseModel): value: Optional[NDArray] = None target: Optional["VectorData"] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` @@ -90,19 +131,19 @@ class VectorIndexMixin(BaseModel): start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.array[slice(start, end)] + return self.target.value[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: return self.value[item] - elif type(self.target).__name__ == "VectorData": + elif isinstance(self.target, VectorData): if isinstance(item, int): return self._getitem_helper(item) else: idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: - raise NotImplementedError("DynamicTableRange not supported yet") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -111,6 +152,24 @@ class 
VectorIndexMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Get length from value + """ + return len(self.value) + class DynamicTableMixin(BaseModel): """ @@ -130,6 +189,7 @@ class DynamicTableMixin(BaseModel): # overridden by subclass but implemented here for testing and typechecking purposes :) colnames: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None @property def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: @@ -221,6 +281,10 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) + elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + # special case where we are returning a row in a ragged array, + # same as above - prevent pandas pivoting to long + val = Series([val]) data[k] = val return data @@ -240,9 +304,40 @@ class DynamicTableMixin(BaseModel): return super().__setattr__(key, value) + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:, :], item) + except AttributeError: + raise e from None + @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]) -> None: + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_COLUMN_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]) -> Dict: """ Construct colnames from arguments. @@ -288,6 +383,40 @@ class DynamicTableMixin(BaseModel): idx.target = col return self + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._columns.values()] + assert [length == lengths[0] for length in lengths], ( + "Columns are not of equal length! " + f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" + ) + return self + + @field_validator("*", mode="wrap") + @classmethod + def cast_columns( + cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo + ) -> Any: + """ + If columns are supplied as arrays, try casting them to the type before validating + """ + try: + return handler(val) + except ValidationError: + annotation = cls.model_fields[info.field_name].annotation + if type(annotation).__name__ == "_UnionGenericAlias": + annotation = annotation.__args__[0] + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) + ) + linkml_meta = LinkMLMeta( { @@ -325,7 +454,9 @@ class Index(Data): ) name: str = Field(...) 
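
One aside on the `ensure_equal_length_cols` pattern: the predicate it expresses only trips when the per-column comparisons are reduced with `all()`, since a bare non-empty list is always truthy. A sketch of the intended check:

```python
lengths = [3, 3, 2]
# reduce with all() so unequal column lengths actually fail the check
assert not all(length == lengths[0] for length in lengths)
```
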
- target: Data = Field(..., description="""Target dataset that this index applies to.""") + target: Optional[Data] = Field( + None, description="""Target dataset that this index applies to.""" + ) class VectorData(VectorDataMixin): @@ -359,8 +490,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) - target: VectorData = Field( - ..., description="""Reference to the target dataset that this index applies to.""" + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" ) value: Optional[NDArray[Shape["* num_rows"], Any]] = Field( None, json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py index e7d72e4..bd4f1aa 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -4,12 +4,22 @@ from decimal import Decimal from enum import Enum import re import sys -import numpy as np from ...hdmf_common.v1_2_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + field_validator, + model_validator, + ValidationInfo, + ValidatorFunctionWrapHandler, + ValidationError, +) +import numpy as np metamodel_version = "None" version = "1.2.0" @@ -60,6 +70,11 @@ class VectorDataMixin(BaseModel): # redefined in `VectorData`, but included here for testing and type checking value: Optional[NDArray] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing @@ -74,6 +89,27 @@ class VectorDataMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use index as length, if present + """ + if self._index: + return len(self._index) + else: + return len(self.value) + class VectorIndexMixin(BaseModel): """ @@ -84,6 +120,11 @@ class VectorIndexMixin(BaseModel): value: Optional[NDArray] = None target: Optional["VectorData"] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` @@ -91,19 +132,19 @@ class VectorIndexMixin(BaseModel): start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.array[slice(start, end)] + return self.target.value[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: 
return self.value[item] - elif type(self.target).__name__ == "VectorData": + elif isinstance(self.target, VectorData): if isinstance(item, int): return self._getitem_helper(item) else: idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: - raise NotImplementedError("DynamicTableRange not supported yet") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -112,6 +153,24 @@ class VectorIndexMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Get length from value + """ + return len(self.value) + class DynamicTableMixin(BaseModel): """ @@ -131,6 +190,7 @@ class DynamicTableMixin(BaseModel): # overridden by subclass but implemented here for testing and typechecking purposes :) colnames: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None @property def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: @@ -222,6 +282,10 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) + elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + # special case where we are returning a row in a ragged array, + # same as above - prevent pandas pivoting to long + val = Series([val]) data[k] = val return data @@ -241,9 +305,40 @@ class DynamicTableMixin(BaseModel): return super().__setattr__(key, value) + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:, :], item) + except AttributeError: + raise e from None + @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]) -> None: + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_COLUMN_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]) -> Dict: """ Construct colnames from arguments. @@ -289,6 +384,40 @@ class DynamicTableMixin(BaseModel): idx.target = col return self + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._columns.values()] + assert [length == lengths[0] for length in lengths], ( + "Columns are not of equal length! 
" + f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" + ) + return self + + @field_validator("*", mode="wrap") + @classmethod + def cast_columns( + cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo + ) -> Any: + """ + If columns are supplied as arrays, try casting them to the type before validating + """ + try: + return handler(val) + except ValidationError: + annotation = cls.model_fields[info.field_name].annotation + if type(annotation).__name__ == "_UnionGenericAlias": + annotation = annotation.__args__[0] + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) + ) + linkml_meta = LinkMLMeta( { @@ -335,8 +464,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) - target: VectorData = Field( - ..., description="""Reference to the target dataset that this index applies to.""" + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" ) description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py index 86db0e3..2d676af 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -4,12 +4,22 @@ from decimal import Decimal from enum import Enum import re import sys -import numpy as np from ...hdmf_common.v1_2_1.hdmf_common_base import Data, Container from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + field_validator, + model_validator, + ValidationInfo, + ValidatorFunctionWrapHandler, + ValidationError, +) +import numpy as np metamodel_version = "None" version = "1.2.1" @@ -60,6 +70,11 @@ class VectorDataMixin(BaseModel): # redefined in `VectorData`, but included here for testing and type checking value: Optional[NDArray] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing @@ -74,6 +89,27 @@ class VectorDataMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use index as length, if present + """ + if self._index: + return len(self._index) + else: + return len(self.value) + class VectorIndexMixin(BaseModel): """ @@ -84,6 +120,11 @@ class VectorIndexMixin(BaseModel): value: Optional[NDArray] = None target: Optional["VectorData"] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + 
super().__init__(**kwargs) + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` @@ -91,19 +132,19 @@ class VectorIndexMixin(BaseModel): start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.array[slice(start, end)] + return self.target.value[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: return self.value[item] - elif type(self.target).__name__ == "VectorData": + elif isinstance(self.target, VectorData): if isinstance(item, int): return self._getitem_helper(item) else: idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: - raise NotImplementedError("DynamicTableRange not supported yet") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -112,6 +153,24 @@ class VectorIndexMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Get length from value + """ + return len(self.value) + class DynamicTableMixin(BaseModel): """ @@ -131,6 +190,7 @@ class DynamicTableMixin(BaseModel): # overridden by subclass but implemented here for testing and typechecking purposes :) colnames: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None @property def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: @@ -222,6 +282,10 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) + elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + # special case where we are returning a row in a ragged array, + # same as above - prevent pandas pivoting to long + val = Series([val]) data[k] = val return data @@ -241,9 +305,40 @@ class DynamicTableMixin(BaseModel): return super().__setattr__(key, value) + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:, :], item) + except AttributeError: + raise e from None + @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]) -> None: + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_COLUMN_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]) -> Dict: """ Construct colnames from arguments. 
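
The `val = Series([val])` special case above exists because pandas unpacks a bare array into one row per element; wrapping it keeps the ragged row as a single cell. A minimal demonstration (numpy/pandas only, names illustrative):

```python
import numpy as np
from pandas import DataFrame, Series

spikes = np.array([0.1, 0.2, 0.3])  # one row of a ragged column

long = DataFrame({"spike_times": spikes})            # pivots to 3 rows
kept = DataFrame({"spike_times": Series([spikes])})  # stays 1 row, 1 cell

assert long.shape == (3, 1)
assert kept.shape == (1, 1)
assert np.all(kept.iloc[0, 0] == spikes)
```
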
@@ -289,6 +384,40 @@ class DynamicTableMixin(BaseModel): idx.target = col return self + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._columns.values()] + assert [length == lengths[0] for length in lengths], ( + "Columns are not of equal length! " + f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" + ) + return self + + @field_validator("*", mode="wrap") + @classmethod + def cast_columns( + cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo + ) -> Any: + """ + If columns are supplied as arrays, try casting them to the type before validating + """ + try: + return handler(val) + except ValidationError: + annotation = cls.model_fields[info.field_name].annotation + if type(annotation).__name__ == "_UnionGenericAlias": + annotation = annotation.__args__[0] + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) + ) + linkml_meta = LinkMLMeta( { @@ -335,8 +464,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) - target: VectorData = Field( - ..., description="""Reference to the target dataset that this index applies to.""" + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" ) description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py index d221165..19f855b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -4,12 +4,22 @@ from decimal import Decimal from enum import Enum import re import sys -import numpy as np from ...hdmf_common.v1_3_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + field_validator, + model_validator, + ValidationInfo, + ValidatorFunctionWrapHandler, + ValidationError, +) +import numpy as np metamodel_version = "None" version = "1.3.0" @@ -60,6 +70,11 @@ class VectorDataMixin(BaseModel): # redefined in `VectorData`, but included here for testing and type checking value: Optional[NDArray] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing @@ -74,6 +89,27 @@ class VectorDataMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use index as length, if present + """ + if self._index: + 
return len(self._index) + else: + return len(self.value) + class VectorIndexMixin(BaseModel): """ @@ -84,6 +120,11 @@ class VectorIndexMixin(BaseModel): value: Optional[NDArray] = None target: Optional["VectorData"] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` @@ -91,19 +132,19 @@ class VectorIndexMixin(BaseModel): start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.array[slice(start, end)] + return self.target.value[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: return self.value[item] - elif type(self.target).__name__ == "VectorData": + elif isinstance(self.target, VectorData): if isinstance(item, int): return self._getitem_helper(item) else: idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: - raise NotImplementedError("DynamicTableRange not supported yet") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -112,6 +153,24 @@ class VectorIndexMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Get length from value + """ + return len(self.value) + class DynamicTableMixin(BaseModel): """ @@ -131,6 +190,7 @@ class DynamicTableMixin(BaseModel): # overridden by subclass but implemented here for testing and typechecking purposes :) colnames: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None @property def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: @@ -222,6 +282,10 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) + elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + # special case where we are returning a row in a ragged array, + # same as above - prevent pandas pivoting to long + val = Series([val]) data[k] = val return data @@ -241,9 +305,40 @@ class DynamicTableMixin(BaseModel): return super().__setattr__(key, value) + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:, :], item) + except AttributeError: + raise e from None + @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]) -> None: + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_COLUMN_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]) 
-> Dict: """ Construct colnames from arguments. @@ -289,6 +384,40 @@ class DynamicTableMixin(BaseModel): idx.target = col return self + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._columns.values()] + assert [length == lengths[0] for length in lengths], ( + "Columns are not of equal length! " + f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" + ) + return self + + @field_validator("*", mode="wrap") + @classmethod + def cast_columns( + cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo + ) -> Any: + """ + If columns are supplied as arrays, try casting them to the type before validating + """ + try: + return handler(val) + except ValidationError: + annotation = cls.model_fields[info.field_name].annotation + if type(annotation).__name__ == "_UnionGenericAlias": + annotation = annotation.__args__[0] + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) + ) + linkml_meta = LinkMLMeta( { @@ -335,8 +464,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) - target: VectorData = Field( - ..., description="""Reference to the target dataset that this index applies to.""" + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" ) description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index dbc86c7..1d1cbf4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -4,12 +4,22 @@ from decimal import Decimal from enum import Enum import re import sys -import numpy as np from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple from numpydantic import NDArray, Shape -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + field_validator, + model_validator, + ValidationInfo, + ValidatorFunctionWrapHandler, + ValidationError, +) +import numpy as np metamodel_version = "None" version = "1.4.0" @@ -60,6 +70,11 @@ class VectorDataMixin(BaseModel): # redefined in `VectorData`, but included here for testing and type checking value: Optional[NDArray] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing @@ -74,6 +89,27 @@ class VectorDataMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use index as 
length, if present + """ + if self._index: + return len(self._index) + else: + return len(self.value) + class VectorIndexMixin(BaseModel): """ @@ -84,6 +120,11 @@ class VectorIndexMixin(BaseModel): value: Optional[NDArray] = None target: Optional["VectorData"] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` @@ -91,19 +132,19 @@ class VectorIndexMixin(BaseModel): start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.array[slice(start, end)] + return self.target.value[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: return self.value[item] - elif type(self.target).__name__ == "VectorData": + elif isinstance(self.target, VectorData): if isinstance(item, int): return self._getitem_helper(item) else: idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: - raise NotImplementedError("DynamicTableRange not supported yet") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -112,6 +153,24 @@ class VectorIndexMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Get length from value + """ + return len(self.value) + class DynamicTableMixin(BaseModel): """ @@ -131,6 +190,7 @@ class DynamicTableMixin(BaseModel): # overridden by subclass but implemented here for testing and typechecking purposes :) colnames: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None @property def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: @@ -222,6 +282,10 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) + elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + # special case where we are returning a row in a ragged array, + # same as above - prevent pandas pivoting to long + val = Series([val]) data[k] = val return data @@ -241,9 +305,40 @@ class DynamicTableMixin(BaseModel): return super().__setattr__(key, value) + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:, :], item) + except AttributeError: + raise e from None + @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]) -> None: + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_COLUMN_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + + @model_validator(mode="before") + @classmethod + def 
create_colnames(cls, model: Dict[str, Any]) -> Dict: """ Construct colnames from arguments. @@ -289,6 +384,40 @@ class DynamicTableMixin(BaseModel): idx.target = col return self + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._columns.values()] + assert [length == lengths[0] for length in lengths], ( + "Columns are not of equal length! " + f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" + ) + return self + + @field_validator("*", mode="wrap") + @classmethod + def cast_columns( + cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo + ) -> Any: + """ + If columns are supplied as arrays, try casting them to the type before validating + """ + try: + return handler(val) + except ValidationError: + annotation = cls.model_fields[info.field_name].annotation + if type(annotation).__name__ == "_UnionGenericAlias": + annotation = annotation.__args__[0] + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) + ) + linkml_meta = LinkMLMeta( { @@ -335,8 +464,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) - target: VectorData = Field( - ..., description="""Reference to the target dataset that this index applies to.""" + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" ) description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index f7edfe3..a911159 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -4,11 +4,21 @@ from decimal import Decimal from enum import Enum import re import sys -import numpy as np from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + field_validator, + model_validator, + ValidationInfo, + ValidatorFunctionWrapHandler, + ValidationError, +) +import numpy as np from numpydantic import NDArray, Shape metamodel_version = "None" @@ -60,6 +70,11 @@ class VectorDataMixin(BaseModel): # redefined in `VectorData`, but included here for testing and type checking value: Optional[NDArray] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing @@ -74,6 +89,27 @@ class VectorDataMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> 
int: + """ + Use index as length, if present + """ + if self._index: + return len(self._index) + else: + return len(self.value) + class VectorIndexMixin(BaseModel): """ @@ -84,6 +120,11 @@ class VectorIndexMixin(BaseModel): value: Optional[NDArray] = None target: Optional["VectorData"] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` @@ -91,19 +132,19 @@ class VectorIndexMixin(BaseModel): start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.array[slice(start, end)] + return self.target.value[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: return self.value[item] - elif type(self.target).__name__ == "VectorData": + elif isinstance(self.target, VectorData): if isinstance(item, int): return self._getitem_helper(item) else: idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: - raise NotImplementedError("DynamicTableRange not supported yet") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -112,6 +153,24 @@ class VectorIndexMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Get length from value + """ + return len(self.value) + class DynamicTableMixin(BaseModel): """ @@ -131,6 +190,7 @@ class DynamicTableMixin(BaseModel): # overridden by subclass but implemented here for testing and typechecking purposes :) colnames: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None @property def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: @@ -222,6 +282,10 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) + elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + # special case where we are returning a row in a ragged array, + # same as above - prevent pandas pivoting to long + val = Series([val]) data[k] = val return data @@ -241,9 +305,40 @@ class DynamicTableMixin(BaseModel): return super().__setattr__(key, value) + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:, :], item) + except AttributeError: + raise e from None + @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]) -> None: + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_COLUMN_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + + 
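
Note on the `ensure_equal_length_cols` validator added to each of these generated `hdmf_common_table` modules: it asserts on a list comprehension, and a non-empty list is always truthy, so the assertion passes even when column lengths differ. A corrected sketch (not part of this patch) would use `all()`:

```python
lengths = [3, 3, 4]  # hypothetical column lengths, one column is ragged

# the check as added: asserts on a list, which is truthy whenever any
# columns exist, so unequal lengths slip through
print(bool([length == lengths[0] for length in lengths]))  # True

# the intended check: compares every length against the first
print(all(length == lengths[0] for length in lengths))  # False
```
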
@model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]) -> Dict: """ Construct colnames from arguments. @@ -289,6 +384,40 @@ class DynamicTableMixin(BaseModel): idx.target = col return self + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._columns.values()] + assert [length == lengths[0] for length in lengths], ( + "Columns are not of equal length! " + f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" + ) + return self + + @field_validator("*", mode="wrap") + @classmethod + def cast_columns( + cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo + ) -> Any: + """ + If columns are supplied as arrays, try casting them to the type before validating + """ + try: + return handler(val) + except ValidationError: + annotation = cls.model_fields[info.field_name].annotation + if type(annotation).__name__ == "_UnionGenericAlias": + annotation = annotation.__args__[0] + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) + ) + linkml_meta = LinkMLMeta( { @@ -335,8 +464,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) - target: VectorData = Field( - ..., description="""Reference to the target dataset that this index applies to.""" + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" ) description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 5d3a3bf..266c8d4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -4,11 +4,21 @@ from decimal import Decimal from enum import Enum import re import sys -import numpy as np from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + field_validator, + model_validator, + ValidationInfo, + ValidatorFunctionWrapHandler, + ValidationError, +) +import numpy as np from numpydantic import NDArray, Shape metamodel_version = "None" @@ -60,6 +70,11 @@ class VectorDataMixin(BaseModel): # redefined in `VectorData`, but included here for testing and type checking value: Optional[NDArray] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing @@ -74,6 +89,27 @@ class VectorDataMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except 
AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use index as length, if present + """ + if self._index: + return len(self._index) + else: + return len(self.value) + class VectorIndexMixin(BaseModel): """ @@ -84,6 +120,11 @@ class VectorIndexMixin(BaseModel): value: Optional[NDArray] = None target: Optional["VectorData"] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` @@ -91,19 +132,19 @@ class VectorIndexMixin(BaseModel): start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.array[slice(start, end)] + return self.target.value[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: return self.value[item] - elif type(self.target).__name__ == "VectorData": + elif isinstance(self.target, VectorData): if isinstance(item, int): return self._getitem_helper(item) else: idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: - raise NotImplementedError("DynamicTableRange not supported yet") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -112,6 +153,24 @@ class VectorIndexMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Get length from value + """ + return len(self.value) + class DynamicTableMixin(BaseModel): """ @@ -131,6 +190,7 @@ class DynamicTableMixin(BaseModel): # overridden by subclass but implemented here for testing and typechecking purposes :) colnames: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None @property def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: @@ -222,6 +282,10 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) + elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + # special case where we are returning a row in a ragged array, + # same as above - prevent pandas pivoting to long + val = Series([val]) data[k] = val return data @@ -241,9 +305,40 @@ class DynamicTableMixin(BaseModel): return super().__setattr__(key, value) + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:, :], item) + except AttributeError: + raise e from None + @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]) -> None: + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_COLUMN_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = 
np.arange(np.max(lengths)) + + return model + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]) -> Dict: """ Construct colnames from arguments. @@ -289,6 +384,40 @@ class DynamicTableMixin(BaseModel): idx.target = col return self + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._columns.values()] + assert [length == lengths[0] for length in lengths], ( + "Columns are not of equal length! " + f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" + ) + return self + + @field_validator("*", mode="wrap") + @classmethod + def cast_columns( + cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo + ) -> Any: + """ + If columns are supplied as arrays, try casting them to the type before validating + """ + try: + return handler(val) + except ValidationError: + annotation = cls.model_fields[info.field_name].annotation + if type(annotation).__name__ == "_UnionGenericAlias": + annotation = annotation.__args__[0] + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) + ) + linkml_meta = LinkMLMeta( { @@ -335,8 +464,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) - target: VectorData = Field( - ..., description="""Reference to the target dataset that this index applies to.""" + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" ) description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index e6f9d8c..b1a9b5a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -4,11 +4,21 @@ from decimal import Decimal from enum import Enum import re import sys -import numpy as np from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + field_validator, + model_validator, + ValidationInfo, + ValidatorFunctionWrapHandler, + ValidationError, +) +import numpy as np from numpydantic import NDArray, Shape metamodel_version = "None" @@ -60,6 +70,11 @@ class VectorDataMixin(BaseModel): # redefined in `VectorData`, but included here for testing and type checking value: Optional[NDArray] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing @@ -74,6 +89,27 @@ class VectorDataMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + 
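
The `__getattr__` forwarding added to both mixins means any attribute pydantic does not resolve falls through to the wrapped array. A self-contained sketch of that behavior; `Wrapped` is illustrative, not a class from this patch:

```python
from typing import Any, Optional

import numpy as np
from pydantic import BaseModel


class Wrapped(BaseModel):
    """Illustrative stand-in for the VectorDataMixin forwarding pattern."""

    value: Optional[Any] = None

    def __getattr__(self, item: str) -> Any:
        try:
            return BaseModel.__getattr__(self, item)
        except AttributeError as e:
            try:
                # fall through to the wrapped array
                return getattr(self.value, item)
            except AttributeError:
                raise e from None


w = Wrapped(value=np.array([1, 2, 3]))
print(w.mean())  # 2.0, forwarded to w.value.mean()
print(w.shape)   # (3,), forwarded to w.value.shape
```
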
return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use index as length, if present + """ + if self._index: + return len(self._index) + else: + return len(self.value) + class VectorIndexMixin(BaseModel): """ @@ -84,6 +120,11 @@ class VectorIndexMixin(BaseModel): value: Optional[NDArray] = None target: Optional["VectorData"] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` @@ -91,19 +132,19 @@ class VectorIndexMixin(BaseModel): start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.array[slice(start, end)] + return self.target.value[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: return self.value[item] - elif type(self.target).__name__ == "VectorData": + elif isinstance(self.target, VectorData): if isinstance(item, int): return self._getitem_helper(item) else: idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: - raise NotImplementedError("DynamicTableRange not supported yet") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -112,6 +153,24 @@ class VectorIndexMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Get length from value + """ + return len(self.value) + class DynamicTableMixin(BaseModel): """ @@ -131,6 +190,7 @@ class DynamicTableMixin(BaseModel): # overridden by subclass but implemented here for testing and typechecking purposes :) colnames: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None @property def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: @@ -222,6 +282,10 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) + elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + # special case where we are returning a row in a ragged array, + # same as above - prevent pandas pivoting to long + val = Series([val]) data[k] = val return data @@ -241,9 +305,40 @@ class DynamicTableMixin(BaseModel): return super().__setattr__(key, value) + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:, :], item) + except AttributeError: + raise e from None + @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]) -> None: + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_COLUMN_FIELDS + ): + continue + 
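
`create_id` infers the number of rows from whichever candidate column is longest, skipping indexed columns (whose flat length is not the row count) and non-column fields. A minimal sketch with hypothetical column names and values:

```python
import numpy as np

# hypothetical un-indexed columns passed to the model
model = {"spike_times": [0.1, 0.2, 0.3], "quality": ["good", "mua", "good"]}
lengths = [len(val) for val in model.values()]
model["id"] = np.arange(np.max(lengths))  # array([0, 1, 2])
```
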
lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]) -> Dict: """ Construct colnames from arguments. @@ -289,6 +384,40 @@ class DynamicTableMixin(BaseModel): idx.target = col return self + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._columns.values()] + assert [length == lengths[0] for length in lengths], ( + "Columns are not of equal length! " + f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" + ) + return self + + @field_validator("*", mode="wrap") + @classmethod + def cast_columns( + cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo + ) -> Any: + """ + If columns are supplied as arrays, try casting them to the type before validating + """ + try: + return handler(val) + except ValidationError: + annotation = cls.model_fields[info.field_name].annotation + if type(annotation).__name__ == "_UnionGenericAlias": + annotation = annotation.__args__[0] + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) + ) + linkml_meta = LinkMLMeta( { @@ -335,8 +464,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) - target: VectorData = Field( - ..., description="""Reference to the target dataset that this index applies to.""" + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" ) description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index 1875be8..ca52a0b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -4,11 +4,21 @@ from decimal import Decimal from enum import Enum import re import sys -import numpy as np from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + field_validator, + model_validator, + ValidationInfo, + ValidatorFunctionWrapHandler, + ValidationError, +) +import numpy as np from numpydantic import NDArray, Shape metamodel_version = "None" @@ -60,6 +70,11 @@ class VectorDataMixin(BaseModel): # redefined in `VectorData`, but included here for testing and type checking value: Optional[NDArray] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: if self._index: # Following hdmf, VectorIndex is the thing that knows how to do the slicing @@ -74,6 +89,27 @@ class VectorDataMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + 
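
A worked example of the ragged indexing behind the `VectorIndexMixin._getitem_helper` added throughout this patch: the index stores the cumulative end offset of each row in the flat target, so row `i` spans `value[i - 1]:value[i]`, starting at 0 for the first row. Values here are hypothetical:

```python
import numpy as np

target = np.array([10, 20, 30, 40, 50])  # flat target data (VectorData.value)
index = np.array([2, 3, 5])              # end offset of each ragged row


def row(i: int) -> np.ndarray:
    # mirrors _getitem_helper: slice the flat target by cumulative offsets
    start = 0 if i == 0 else index[i - 1]
    return target[start:index[i]]


print(row(0))  # [10 20]
print(row(1))  # [30]
print(row(2))  # [40 50]
```
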
except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use index as length, if present + """ + if self._index: + return len(self._index) + else: + return len(self.value) + class VectorIndexMixin(BaseModel): """ @@ -84,6 +120,11 @@ class VectorIndexMixin(BaseModel): value: Optional[NDArray] = None target: Optional["VectorData"] = None + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + def _getitem_helper(self, arg: int) -> Union[list, NDArray]: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` @@ -91,19 +132,19 @@ class VectorIndexMixin(BaseModel): start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.array[slice(start, end)] + return self.target.value[slice(start, end)] def __getitem__(self, item: Union[int, slice]) -> Any: if self.target is None: return self.value[item] - elif type(self.target).__name__ == "VectorData": + elif isinstance(self.target, VectorData): if isinstance(item, int): return self._getitem_helper(item) else: idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: - raise NotImplementedError("DynamicTableRange not supported yet") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -112,6 +153,24 @@ class VectorIndexMixin(BaseModel): else: self.value[key] = value + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Get length from value + """ + return len(self.value) + class DynamicTableMixin(BaseModel): """ @@ -131,6 +190,7 @@ class DynamicTableMixin(BaseModel): # overridden by subclass but implemented here for testing and typechecking purposes :) colnames: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None @property def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: @@ -222,6 +282,10 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) + elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + # special case where we are returning a row in a ragged array, + # same as above - prevent pandas pivoting to long + val = Series([val]) data[k] = val return data @@ -241,9 +305,40 @@ class DynamicTableMixin(BaseModel): return super().__setattr__(key, value) + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:, :], item) + except AttributeError: + raise e from None + @model_validator(mode="before") @classmethod - def create_colnames(cls, model: Dict[str, Any]) -> None: + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in 
cls.NON_COLUMN_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + + @model_validator(mode="before") + @classmethod + def create_colnames(cls, model: Dict[str, Any]) -> Dict: """ Construct colnames from arguments. @@ -289,6 +384,40 @@ class DynamicTableMixin(BaseModel): idx.target = col return self + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._columns.values()] + assert [length == lengths[0] for length in lengths], ( + "Columns are not of equal length! " + f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" + ) + return self + + @field_validator("*", mode="wrap") + @classmethod + def cast_columns( + cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo + ) -> Any: + """ + If columns are supplied as arrays, try casting them to the type before validating + """ + try: + return handler(val) + except ValidationError: + annotation = cls.model_fields[info.field_name].annotation + if type(annotation).__name__ == "_UnionGenericAlias": + annotation = annotation.__args__[0] + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) + ) + linkml_meta = LinkMLMeta( { @@ -335,8 +464,8 @@ class VectorIndex(VectorIndexMixin): ) name: str = Field(...) - target: VectorData = Field( - ..., description="""Reference to the target dataset that this index applies to.""" + target: Optional[VectorData] = Field( + None, description="""Reference to the target dataset that this index applies to.""" ) description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index 7dbb253..d4ab5b4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -1,22 +1,25 @@ from __future__ import annotations - - -from ...hdmf_common.v1_8_0.hdmf_common_base import Data +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Dict, Optional, Union, overload, Tuple +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple from pydantic import ( BaseModel, ConfigDict, Field, RootModel, - model_validator, field_validator, + model_validator, ValidationInfo, ValidatorFunctionWrapHandler, ValidationError, ) -from numpydantic import NDArray, Shape import numpy as np +from numpydantic import NDArray, Shape metamodel_version = "None" version = "1.8.0" @@ -96,7 +99,7 @@ class VectorDataMixin(BaseModel): try: return getattr(self.value, item) except AttributeError: - raise e + raise e from None def __len__(self) -> int: """ @@ -141,7 +144,7 @@ class VectorIndexMixin(BaseModel): idx = range(*item.indices(len(self.value))) return [self._getitem_helper(i) for i in idx] else: - raise NotImplementedError("DynamicTableRange not supported yet") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -160,7 
+163,7 @@ class VectorIndexMixin(BaseModel): try: return getattr(self.value, item) except AttributeError: - raise e + raise e from None def __len__(self) -> int: """ @@ -302,7 +305,7 @@ class DynamicTableMixin(BaseModel): return super().__setattr__(key, value) - def __getattr__(self, item): + def __getattr__(self, item: str) -> Any: """Try and use pandas df attrs if we don't have them""" try: return BaseModel.__getattr__(self, item) @@ -310,7 +313,7 @@ class DynamicTableMixin(BaseModel): try: return getattr(self[:, :], item) except AttributeError: - raise e + raise e from None @model_validator(mode="before") @classmethod @@ -387,7 +390,7 @@ class DynamicTableMixin(BaseModel): Ensure that all columns are equal length """ lengths = [len(v) for v in self._columns.values()] - assert [l == lengths[0] for l in lengths], ( + assert [length == lengths[0] for length in lengths], ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -395,7 +398,9 @@ class DynamicTableMixin(BaseModel): @field_validator("*", mode="wrap") @classmethod - def cast_columns(cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo): + def cast_columns( + cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo + ) -> Any: """ If columns are supplied as arrays, try casting them to the type before validating """ diff --git a/pyproject.toml b/pyproject.toml index 2ea2cdc..eae490f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,6 +70,8 @@ ignore = [ "UP006", "UP035", # | for Union types (only supported >=3.10 "UP007", "UP038", + # syntax error in forward annotation with numpydantic + "F722" ] fixable = ["ALL"] From edea802ff1ad68d1760aad16f73817616c60ab38 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Tue, 6 Aug 2024 20:35:23 -0700 Subject: [PATCH 26/61] remove dask, nptyping --- nwb_linkml/pdm.lock | 328 ++++++++------------- nwb_linkml/pyproject.toml | 2 - nwb_linkml/src/nwb_linkml/maps/__init__.py | 3 +- nwb_linkml/src/nwb_linkml/maps/dtype.py | 48 +-- nwb_linkml/src/nwb_linkml/maps/hdmf.py | 62 +--- nwb_linkml/src/nwb_linkml/monkeypatch.py | 52 ---- nwb_linkml/tests/test_maps/test_dtype.py | 11 - 7 files changed, 124 insertions(+), 382 deletions(-) delete mode 100644 nwb_linkml/tests/test_maps/test_dtype.py diff --git a/nwb_linkml/pdm.lock b/nwb_linkml/pdm.lock index d81da1e..51a3fcc 100644 --- a/nwb_linkml/pdm.lock +++ b/nwb_linkml/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "dev", "plot", "tests"] strategy = ["inherit_metadata"] lock_version = "4.5.0" -content_hash = "sha256:6b84e75b424a643bdfbc4cb0d775e6db831b27c7a770097e28dc8eb8bf76b54d" +content_hash = "sha256:903c3aeebf0fb234263b45213693f0eaee7ac290d22633b1d7a4d5aff51d032b" [[metadata.targets]] requires_python = ">=3.10,<3.13" @@ -53,7 +53,7 @@ files = [ [[package]] name = "attrs" -version = "24.1.0" +version = "24.2.0" requires_python = ">=3.7" summary = "Classes Without Boilerplate" groups = ["default", "dev", "tests"] @@ -61,8 +61,8 @@ dependencies = [ "importlib-metadata; python_version < \"3.8\"", ] files = [ - {file = "attrs-24.1.0-py3-none-any.whl", hash = "sha256:377b47448cb61fea38533f671fba0d0f8a96fd58facd4dc518e3dac9dbea0905"}, - {file = "attrs-24.1.0.tar.gz", hash = "sha256:adbdec84af72d38be7628e353a09b6a6790d15cd71819f6e9d7b0faa8a125745"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = 
"sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [[package]] @@ -229,17 +229,6 @@ files = [ {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] -[[package]] -name = "cloudpickle" -version = "3.0.0" -requires_python = ">=3.8" -summary = "Pickler class to extend the standard pickle.Pickler functionality" -groups = ["default"] -files = [ - {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"}, - {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"}, -] - [[package]] name = "colorama" version = "0.4.6" @@ -414,27 +403,6 @@ files = [ {file = "dash_table-5.0.0.tar.gz", hash = "sha256:18624d693d4c8ef2ddec99a6f167593437a7ea0bf153aa20f318c170c5bc7308"}, ] -[[package]] -name = "dask" -version = "2024.7.1" -requires_python = ">=3.9" -summary = "Parallel PyData with Task Scheduling" -groups = ["default"] -dependencies = [ - "click>=8.1", - "cloudpickle>=1.5.0", - "fsspec>=2021.09.0", - "importlib-metadata>=4.13.0; python_version < \"3.12\"", - "packaging>=20.0", - "partd>=1.4.0", - "pyyaml>=5.3.1", - "toolz>=0.10.0", -] -files = [ - {file = "dask-2024.7.1-py3-none-any.whl", hash = "sha256:dd046840050376c317de90629db5c6197adda820176cf3e2df10c3219d11951f"}, - {file = "dask-2024.7.1.tar.gz", hash = "sha256:dbaef2d50efee841a9d981a218cfeb50392fc9a95e0403b6d680450e4f50d531"}, -] - [[package]] name = "deprecated" version = "1.2.14" @@ -514,17 +482,6 @@ files = [ {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, ] -[[package]] -name = "fsspec" -version = "2024.6.1" -requires_python = ">=3.8" -summary = "File-system specification" -groups = ["default"] -files = [ - {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, - {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, -] - [[package]] name = "future-fstrings" version = "1.2.0" @@ -651,7 +608,7 @@ name = "importlib-metadata" version = "8.2.0" requires_python = ">=3.8" summary = "Read metadata from Python packages" -groups = ["default", "dev", "plot", "tests"] +groups = ["dev", "plot", "tests"] dependencies = [ "typing-extensions>=3.6.4; python_version < \"3.8\"", "zipp>=0.5", @@ -939,17 +896,6 @@ files = [ {file = "linkml_runtime-1.8.0.tar.gz", hash = "sha256:436381a7bf791e9af4ef0a5adcac86762d451b77670fbdb3ba083d2c177fb5f2"}, ] -[[package]] -name = "locket" -version = "1.0.0" -requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -summary = "File-based locks for Python on Linux and Windows" -groups = ["default"] -files = [ - {file = "locket-1.0.0-py2.py3-none-any.whl", hash = "sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3"}, - {file = "locket-1.0.0.tar.gz", hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632"}, -] - [[package]] name = "markdown-it-py" version = "3.0.0" @@ -1048,54 +994,44 @@ files = [ {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"}, ] -[[package]] -name = "nptyping" -version = "2.5.0" -requires_python = ">=3.7" -summary = "Type hints for NumPy." 
-groups = ["default"] -dependencies = [ - "numpy<2.0.0,>=1.20.0; python_version >= \"3.8\"", - "numpy==1.21.5; python_version < \"3.8\"", - "typing-extensions<5.0.0,>=4.0.0; python_version < \"3.10\"", -] -files = [ - {file = "nptyping-2.5.0-py3-none-any.whl", hash = "sha256:764e51836faae33a7ae2e928af574cfb701355647accadcc89f2ad793630b7c8"}, - {file = "nptyping-2.5.0.tar.gz", hash = "sha256:e3d35b53af967e6fb407c3016ff9abae954d3a0568f7cc13a461084224e8e20a"}, -] - [[package]] name = "numpy" -version = "1.26.4" +version = "2.0.1" requires_python = ">=3.9" summary = "Fundamental package for array computing in Python" groups = ["default"] files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = 
"numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fbb536eac80e27a2793ffd787895242b7f18ef792563d742c2d673bfcb75134"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69ff563d43c69b1baba77af455dd0a839df8d25e8590e79c90fcbe1499ebde42"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:1b902ce0e0a5bb7704556a217c4f63a7974f8f43e090aff03fcf262e0b135e02"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:f1659887361a7151f89e79b276ed8dff3d75877df906328f14d8bb40bb4f5101"}, + {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4658c398d65d1b25e1760de3157011a80375da861709abd7cef3bad65d6543f9"}, + {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4127d4303b9ac9f94ca0441138acead39928938660ca58329fe156f84b9f3015"}, + {file = "numpy-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e5eeca8067ad04bc8a2a8731183d51d7cbaac66d86085d5f4766ee6bf19c7f87"}, + {file = "numpy-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9adbd9bb520c866e1bfd7e10e1880a1f7749f1f6e5017686a5fbb9b72cf69f82"}, + {file = "numpy-2.0.1-cp310-cp310-win32.whl", hash = "sha256:7b9853803278db3bdcc6cd5beca37815b133e9e77ff3d4733c247414e78eb8d1"}, + {file = "numpy-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:81b0893a39bc5b865b8bf89e9ad7807e16717f19868e9d234bdaf9b1f1393868"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75b4e316c5902d8163ef9d423b1c3f2f6252226d1aa5cd8a0a03a7d01ffc6268"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6e4eeb6eb2fced786e32e6d8df9e755ce5be920d17f7ce00bc38fcde8ccdbf9e"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a1e01dcaab205fbece13c1410253a9eea1b1c9b61d237b6fa59bcc46e8e89343"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8fc2de81ad835d999113ddf87d1ea2b0f4704cbd947c948d2f5513deafe5a7b"}, + {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a3d94942c331dd4e0e1147f7a8699a4aa47dffc11bf8a1523c12af8b2e91bbe"}, + {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15eb4eca47d36ec3f78cde0a3a2ee24cf05ca7396ef808dda2c0ddad7c2bde67"}, + {file = "numpy-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b83e16a5511d1b1f8a88cbabb1a6f6a499f82c062a4251892d9ad5d609863fb7"}, + {file = "numpy-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f87fec1f9bc1efd23f4227becff04bd0e979e23ca50cc92ec88b38489db3b55"}, + {file = "numpy-2.0.1-cp311-cp311-win32.whl", hash = 
"sha256:36d3a9405fd7c511804dc56fc32974fa5533bdeb3cd1604d6b8ff1d292b819c4"}, + {file = "numpy-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:08458fbf403bff5e2b45f08eda195d4b0c9b35682311da5a5a0a0925b11b9bd8"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bf4e6f4a2a2e26655717a1983ef6324f2664d7011f6ef7482e8c0b3d51e82ac"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6fddc5fe258d3328cd8e3d7d3e02234c5d70e01ebe377a6ab92adb14039cb4"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5daab361be6ddeb299a918a7c0864fa8618af66019138263247af405018b04e1"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:ea2326a4dca88e4a274ba3a4405eb6c6467d3ffbd8c7d38632502eaae3820587"}, + {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529af13c5f4b7a932fb0e1911d3a75da204eff023ee5e0e79c1751564221a5c8"}, + {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6790654cb13eab303d8402354fabd47472b24635700f631f041bd0b65e37298a"}, + {file = "numpy-2.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbab9fc9c391700e3e1287666dfd82d8666d10e69a6c4a09ab97574c0b7ee0a7"}, + {file = "numpy-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d0d92a5e3613c33a5f01db206a33f8fdf3d71f2912b0de1739894668b7a93b"}, + {file = "numpy-2.0.1-cp312-cp312-win32.whl", hash = "sha256:173a00b9995f73b79eb0191129f2455f1e34c203f559dd118636858cc452a1bf"}, + {file = "numpy-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:bb2124fdc6e62baae159ebcfa368708867eb56806804d005860b6007388df171"}, + {file = "numpy-2.0.1.tar.gz", hash = "sha256:485b87235796410c3519a699cfe1faab097e509e90ebb05dcd098db2ae87e7b3"}, ] [[package]] @@ -1203,21 +1139,6 @@ files = [ {file = "parse-1.20.2.tar.gz", hash = "sha256:b41d604d16503c79d81af5165155c0b20f6c8d6c559efa66b4b695c3e5a0a0ce"}, ] -[[package]] -name = "partd" -version = "1.4.2" -requires_python = ">=3.9" -summary = "Appendable key-value storage" -groups = ["default"] -dependencies = [ - "locket", - "toolz", -] -files = [ - {file = "partd-1.4.2-py3-none-any.whl", hash = "sha256:978e4ac767ec4ba5b86c6eaa52e5a2a3bc748a2ca839e8cc798f1cc6ce6efb0f"}, - {file = "partd-1.4.2.tar.gz", hash = "sha256:d022c33afbdc8405c226621b015e8067888173d85f7f5ecebb3cafed9a20f02c"}, -] - [[package]] name = "pathspec" version = "0.12.1" @@ -1616,35 +1537,39 @@ files = [ [[package]] name = "pyyaml" -version = "6.0.1" -requires_python = ">=3.6" +version = "6.0.2" +requires_python = ">=3.8" summary = "YAML parser and emitter for Python" groups = ["default"] files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", 
hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] @@ -1796,63 +1721,63 @@ files = [ [[package]] name = "rpds-py" -version = "0.19.1" +version = "0.20.0" requires_python = ">=3.8" summary = "Python bindings to Rust's persistent data structures (rpds)" groups = ["default"] files = 
[ - {file = "rpds_py-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:aaf71f95b21f9dc708123335df22e5a2fef6307e3e6f9ed773b2e0938cc4d491"}, - {file = "rpds_py-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca0dda0c5715efe2ab35bb83f813f681ebcd2840d8b1b92bfc6fe3ab382fae4a"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81db2e7282cc0487f500d4db203edc57da81acde9e35f061d69ed983228ffe3b"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1a8dfa125b60ec00c7c9baef945bb04abf8ac772d8ebefd79dae2a5f316d7850"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:271accf41b02687cef26367c775ab220372ee0f4925591c6796e7c148c50cab5"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9bc4161bd3b970cd6a6fcda70583ad4afd10f2750609fb1f3ca9505050d4ef3"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0cf2a0dbb5987da4bd92a7ca727eadb225581dd9681365beba9accbe5308f7d"}, - {file = "rpds_py-0.19.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b5e28e56143750808c1c79c70a16519e9bc0a68b623197b96292b21b62d6055c"}, - {file = "rpds_py-0.19.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c7af6f7b80f687b33a4cdb0a785a5d4de1fb027a44c9a049d8eb67d5bfe8a687"}, - {file = "rpds_py-0.19.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e429fc517a1c5e2a70d576077231538a98d59a45dfc552d1ac45a132844e6dfb"}, - {file = "rpds_py-0.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d2dbd8f4990d4788cb122f63bf000357533f34860d269c1a8e90ae362090ff3a"}, - {file = "rpds_py-0.19.1-cp310-none-win32.whl", hash = "sha256:e0f9d268b19e8f61bf42a1da48276bcd05f7ab5560311f541d22557f8227b866"}, - {file = "rpds_py-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:df7c841813f6265e636fe548a49664c77af31ddfa0085515326342a751a6ba51"}, - {file = "rpds_py-0.19.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:902cf4739458852fe917104365ec0efbea7d29a15e4276c96a8d33e6ed8ec137"}, - {file = "rpds_py-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f3d73022990ab0c8b172cce57c69fd9a89c24fd473a5e79cbce92df87e3d9c48"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3837c63dd6918a24de6c526277910e3766d8c2b1627c500b155f3eecad8fad65"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cdb7eb3cf3deb3dd9e7b8749323b5d970052711f9e1e9f36364163627f96da58"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26ab43b6d65d25b1a333c8d1b1c2f8399385ff683a35ab5e274ba7b8bb7dc61c"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75130df05aae7a7ac171b3b5b24714cffeabd054ad2ebc18870b3aa4526eba23"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c34f751bf67cab69638564eee34023909380ba3e0d8ee7f6fe473079bf93f09b"}, - {file = "rpds_py-0.19.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2671cb47e50a97f419a02cd1e0c339b31de017b033186358db92f4d8e2e17d8"}, - {file = "rpds_py-0.19.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3c73254c256081704dba0a333457e2fb815364018788f9b501efe7c5e0ada401"}, - {file = "rpds_py-0.19.1-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:4383beb4a29935b8fa28aca8fa84c956bf545cb0c46307b091b8d312a9150e6a"}, - {file = "rpds_py-0.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dbceedcf4a9329cc665452db1aaf0845b85c666e4885b92ee0cddb1dbf7e052a"}, - {file = "rpds_py-0.19.1-cp311-none-win32.whl", hash = "sha256:f0a6d4a93d2a05daec7cb885157c97bbb0be4da739d6f9dfb02e101eb40921cd"}, - {file = "rpds_py-0.19.1-cp311-none-win_amd64.whl", hash = "sha256:c149a652aeac4902ecff2dd93c3b2681c608bd5208c793c4a99404b3e1afc87c"}, - {file = "rpds_py-0.19.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:56313be667a837ff1ea3508cebb1ef6681d418fa2913a0635386cf29cff35165"}, - {file = "rpds_py-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d1d7539043b2b31307f2c6c72957a97c839a88b2629a348ebabe5aa8b626d6b"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1dc59a5e7bc7f44bd0c048681f5e05356e479c50be4f2c1a7089103f1621d5"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b8f78398e67a7227aefa95f876481485403eb974b29e9dc38b307bb6eb2315ea"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ef07a0a1d254eeb16455d839cef6e8c2ed127f47f014bbda64a58b5482b6c836"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8124101e92c56827bebef084ff106e8ea11c743256149a95b9fd860d3a4f331f"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08ce9c95a0b093b7aec75676b356a27879901488abc27e9d029273d280438505"}, - {file = "rpds_py-0.19.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b02dd77a2de6e49078c8937aadabe933ceac04b41c5dde5eca13a69f3cf144e"}, - {file = "rpds_py-0.19.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4dd02e29c8cbed21a1875330b07246b71121a1c08e29f0ee3db5b4cfe16980c4"}, - {file = "rpds_py-0.19.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9c7042488165f7251dc7894cd533a875d2875af6d3b0e09eda9c4b334627ad1c"}, - {file = "rpds_py-0.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f809a17cc78bd331e137caa25262b507225854073fd319e987bd216bed911b7c"}, - {file = "rpds_py-0.19.1-cp312-none-win32.whl", hash = "sha256:3ddab996807c6b4227967fe1587febade4e48ac47bb0e2d3e7858bc621b1cace"}, - {file = "rpds_py-0.19.1-cp312-none-win_amd64.whl", hash = "sha256:32e0db3d6e4f45601b58e4ac75c6f24afbf99818c647cc2066f3e4b192dabb1f"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7d5c7e32f3ee42f77d8ff1a10384b5cdcc2d37035e2e3320ded909aa192d32c3"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:89cc8921a4a5028d6dd388c399fcd2eef232e7040345af3d5b16c04b91cf3c7e"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca34e913d27401bda2a6f390d0614049f5a95b3b11cd8eff80fe4ec340a1208"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5953391af1405f968eb5701ebbb577ebc5ced8d0041406f9052638bafe52209d"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:840e18c38098221ea6201f091fc5d4de6128961d2930fbbc96806fb43f69aec1"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d8b735c4d162dc7d86a9cf3d717f14b6c73637a1f9cd57fe7e61002d9cb1972"}, - {file = 
"rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce757c7c90d35719b38fa3d4ca55654a76a40716ee299b0865f2de21c146801c"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a9421b23c85f361a133aa7c5e8ec757668f70343f4ed8fdb5a4a14abd5437244"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3b823be829407393d84ee56dc849dbe3b31b6a326f388e171555b262e8456cc1"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:5e58b61dcbb483a442c6239c3836696b79f2cd8e7eec11e12155d3f6f2d886d1"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39d67896f7235b2c886fb1ee77b1491b77049dcef6fbf0f401e7b4cbed86bbd4"}, - {file = "rpds_py-0.19.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8b32cd4ab6db50c875001ba4f5a6b30c0f42151aa1fbf9c2e7e3674893fb1dc4"}, - {file = "rpds_py-0.19.1.tar.gz", hash = "sha256:31dd5794837f00b46f4096aa8ccaa5972f73a938982e32ed817bb520c465e520"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, + {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, + {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, + {file = 
"rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, + {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, + {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, + {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, + {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = 
"sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, + {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, ] [[package]] @@ -2080,17 +2005,6 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[[package]] -name = "toolz" -version = "0.12.1" -requires_python = ">=3.7" -summary = "List processing tools and functional utilities" -groups = ["default"] -files = [ - {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"}, - {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"}, -] - [[package]] name = "tqdm" version = "4.66.5" @@ -2276,7 +2190,7 @@ name = "zipp" version = "3.19.2" requires_python = ">=3.8" summary = "Backport of pathlib-compatible object wrapper for zip files" -groups = ["default", "dev", "plot", "tests"] +groups = ["dev", "plot", "tests"] files = [ {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, diff --git a/nwb_linkml/pyproject.toml b/nwb_linkml/pyproject.toml index 8455bed..6efc111 100644 --- a/nwb_linkml/pyproject.toml +++ b/nwb_linkml/pyproject.toml @@ -15,11 +15,9 @@ dependencies = [ "rich>=13.5.2", #"linkml>=1.7.10", "linkml @ 
git+https://github.com/sneakers-the-rat/linkml@nwb-linkml", - "nptyping>=2.5.0", "pydantic>=2.3.0", "h5py>=3.9.0", "pydantic-settings>=2.0.3", - "dask>=2023.9.2", "tqdm>=4.66.1", 'typing-extensions>=4.12.2;python_version<"3.11"', "numpydantic>=1.3.0", diff --git a/nwb_linkml/src/nwb_linkml/maps/__init__.py b/nwb_linkml/src/nwb_linkml/maps/__init__.py index cb7f329..8b01447 100644 --- a/nwb_linkml/src/nwb_linkml/maps/__init__.py +++ b/nwb_linkml/src/nwb_linkml/maps/__init__.py @@ -2,7 +2,7 @@ Mapping from one domain to another """ -from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_np, flat_to_nptyping +from nwb_linkml.maps.dtype import flat_to_linkml, flat_to_np from nwb_linkml.maps.map import Map from nwb_linkml.maps.postload import MAP_HDMF_DATATYPE_DEF, MAP_HDMF_DATATYPE_INC from nwb_linkml.maps.quantity import QUANTITY_MAP @@ -14,5 +14,4 @@ __all__ = [ "Map", "flat_to_linkml", "flat_to_np", - "flat_to_nptyping", ] diff --git a/nwb_linkml/src/nwb_linkml/maps/dtype.py b/nwb_linkml/src/nwb_linkml/maps/dtype.py index 26d25a6..1c90e0d 100644 --- a/nwb_linkml/src/nwb_linkml/maps/dtype.py +++ b/nwb_linkml/src/nwb_linkml/maps/dtype.py @@ -3,9 +3,8 @@ Dtype mappings """ from datetime import datetime -from typing import Any, Type +from typing import Any -import nptyping import numpy as np from nwb_schema_language import CompoundDtype, DTypeType, FlatDtype, ReferenceDtype @@ -40,37 +39,6 @@ flat_to_linkml = { Map between the flat data types and the simpler linkml base types """ -flat_to_nptyping = { - "float": "Float", - "float32": "Float32", - "double": "Double", - "float64": "Float64", - "long": "LongLong", - "int64": "Int64", - "int": "Int", - "int32": "Int32", - "int16": "Int16", - "short": "Short", - "int8": "Int8", - "uint": "UInt", - "uint32": "UInt32", - "uint16": "UInt16", - "uint8": "UInt8", - "uint64": "UInt64", - "numeric": "Number", - "text": "String", - "utf": "Unicode", - "utf8": "Unicode", - "utf_8": "Unicode", - "string": "Unicode", - "str": "Unicode", - "ascii": "String", - "bool": "Bool", - "isodatetime": "Datetime64", - "AnyType": "Any", - "object": "Object", -} - flat_to_np = { "float": float, "float32": np.float32, @@ -175,20 +143,6 @@ https://github.com/hdmf-dev/hdmf/blob/ddc842b5c81d96e0b957b96e88533b16c137e206/s """ -def struct_from_dtype(dtype: np.dtype) -> Type[nptyping.Structure]: - """ - Create a nptyping Structure from a compound numpy dtype - - nptyping structures have the form:: - - Structure["name: Str, age: Int"] - - """ - struct_pieces = [f"{k}: {flat_to_nptyping[v[0].name]}" for k, v in dtype.fields.items()] - struct_dtype = ", ".join(struct_pieces) - return nptyping.Structure[struct_dtype] - - def handle_dtype(dtype: DTypeType | None) -> str: """ Get the string form of a dtype diff --git a/nwb_linkml/src/nwb_linkml/maps/hdmf.py b/nwb_linkml/src/nwb_linkml/maps/hdmf.py index b2c552d..3c52b22 100644 --- a/nwb_linkml/src/nwb_linkml/maps/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/maps/hdmf.py @@ -2,69 +2,9 @@ Mapping functions for handling HDMF classes like DynamicTables """ -from typing import Any, List, Optional, Type +from typing import Any, List, Optional -import dask.array as da import h5py -import numpy as np -from numpydantic import NDArray -from numpydantic.interface.hdf5 import H5ArrayPath -from pydantic import BaseModel, create_model - -from nwb_linkml.maps.dtype import struct_from_dtype -from nwb_linkml.types.hdf5 import HDF5_Path - - -def model_from_dynamictable(group: h5py.Group, base: Optional[BaseModel] = None) -> Type[BaseModel]: - """ - Create 
a pydantic model from a dynamic table - """ - colnames = group.attrs["colnames"] - types = {} - for col in colnames: - - nptype = group[col].dtype - nptype = struct_from_dtype(nptype) if nptype.type == np.void else nptype.type - - type_ = Optional[NDArray[Any, nptype]] - - # FIXME: handling nested column types that appear only in some versions? - # types[col] = (List[type_ | None], ...) - types[col] = (type_, None) - - model = create_model(group.name.split("/")[-1], **types, __base__=base) - return model - - -def dynamictable_to_model( - group: h5py.Group, - model: Optional[Type[BaseModel]] = None, - base: Optional[Type[BaseModel]] = None, -) -> BaseModel: - """ - Instantiate a dynamictable model - - Calls :func:`.model_from_dynamictable` if ``model`` is not provided. - """ - if model is None: - model = model_from_dynamictable(group, base) - - items = {} - for col, col_type in model.model_fields.items(): - if col not in group: - if col in group.attrs: - items[col] = group.attrs[col] - continue - - if col_type.annotation is HDF5_Path: - items[col] = [HDF5_Path(group[d].name) for d in group[col][:]] - else: - try: - items[col] = da.from_array(group[col]) - except NotImplementedError: - items[col] = H5ArrayPath(file=group.file.filename, path=group[col].name) - - return model.model_construct(hdf5_path=group.name, name=group.name.split("/")[-1], **items) def dereference_reference_vector(dset: h5py.Dataset, data: Optional[List[Any]]) -> List: diff --git a/nwb_linkml/src/nwb_linkml/monkeypatch.py b/nwb_linkml/src/nwb_linkml/monkeypatch.py index d9da2c5..6222089 100644 --- a/nwb_linkml/src/nwb_linkml/monkeypatch.py +++ b/nwb_linkml/src/nwb_linkml/monkeypatch.py @@ -5,56 +5,6 @@ Monkeypatches to external modules # ruff: noqa: ANN001 - not well defined types for this module -def patch_npytyping_perf() -> None: - """ - npytyping makes an expensive call to inspect.stack() - that makes imports of pydantic models take ~200x longer than - they should: - - References: - - https://github.com/ramonhagenaars/nptyping/issues/110 - """ - import inspect - from types import FrameType - - from nptyping import base_meta_classes, ndarray, recarray - from nptyping.pandas_ import dataframe - - # make a new __module__ methods for the affected classes - def new_module_ndarray(cls) -> str: - return cls._get_module(inspect.currentframe(), "nptyping.ndarray") - - def new_module_recarray(cls) -> str: - return cls._get_module(inspect.currentframe(), "nptyping.recarray") - - def new_module_dataframe(cls) -> str: - return cls._get_module(inspect.currentframe(), "nptyping.pandas_.dataframe") - - # and a new _get_module method for the parent class - def new_get_module(cls, stack: FrameType, module: str) -> str: - return ( - "typing" - if inspect.getframeinfo(stack.f_back).function == "formatannotation" - else module - ) - - # now apply the patches - ndarray.NDArrayMeta.__module__ = property(new_module_ndarray) - recarray.RecArrayMeta.__module__ = property(new_module_recarray) - dataframe.DataFrameMeta.__module__ = property(new_module_dataframe) - base_meta_classes.SubscriptableMeta._get_module = new_get_module - - -def patch_nptyping_warnings() -> None: - """ - nptyping shits out a bunch of numpy deprecation warnings from using - olde aliases - """ - import warnings - - warnings.filterwarnings("ignore", category=DeprecationWarning, module="nptyping.*") - - def patch_schemaview() -> None: """ Patch schemaview to correctly resolve multiple layers of relative imports. 
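The nptyping workarounds removed above existed because nptyping resolved `__module__` through `inspect.stack()`, which materializes a `FrameInfo` (including source-file context) for every frame on the call stack, while `inspect.currentframe()` only returns the live frame object. A minimal sketch of that cost difference, independent of nptyping (the function names here are illustrative, not part of the codebase):

```python
import inspect
import timeit

def name_via_stack() -> str:
    # inspect.stack() builds a FrameInfo for every frame on the stack,
    # reading source files to attach context -- the expensive path
    return inspect.stack()[0].function

def name_via_frame() -> str:
    # inspect.currentframe() returns the current frame with no stack walk
    return inspect.currentframe().f_code.co_name

print("stack():       ", timeit.timeit(name_via_stack, number=1000))
print("currentframe():", timeit.timeit(name_via_frame, number=1000))
```

With nptyping gone from the dependency tree the workaround is no longer needed, which is why `apply_patches` in the hunk below drops both calls.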
@@ -211,8 +161,6 @@ def patch_pretty_print() -> None: def apply_patches() -> None: """Apply all monkeypatches""" - patch_npytyping_perf() - patch_nptyping_warnings() patch_schemaview() patch_array_expression() patch_pretty_print() diff --git a/nwb_linkml/tests/test_maps/test_dtype.py b/nwb_linkml/tests/test_maps/test_dtype.py deleted file mode 100644 index 569b262..0000000 --- a/nwb_linkml/tests/test_maps/test_dtype.py +++ /dev/null @@ -1,11 +0,0 @@ -import nptyping -import numpy as np - -from nwb_linkml.maps.dtype import struct_from_dtype - - -def test_struct_from_dtype(): - # Super weak test with fixed values, will expand with parameterize if needed - np_dtype = np.dtype([("name1", "int32"), ("name2", "object"), ("name3", "str")]) - struct = struct_from_dtype(np_dtype) - assert struct == nptyping.Structure["name1: Int32, name2: Object, name3: Unicode"] From 3ee7c68e1531fcd6530300ee4aa6f409b1f5452e Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Tue, 6 Aug 2024 20:41:00 -0700 Subject: [PATCH 27/61] fix array casting for dtypes that have a shape attr but nothing in it --- nwb_linkml/src/nwb_linkml/maps/hdf5.py | 116 +++++++++--------- .../hdmf_common/v1_8_0/hdmf_common_table.py | 11 +- nwb_linkml/tests/test_maps/test_hdmf.py | 24 ---- 3 files changed, 61 insertions(+), 90 deletions(-) delete mode 100644 nwb_linkml/tests/test_maps/test_hdmf.py diff --git a/nwb_linkml/src/nwb_linkml/maps/hdf5.py b/nwb_linkml/src/nwb_linkml/maps/hdf5.py index 8ebfd85..a7b052f 100644 --- a/nwb_linkml/src/nwb_linkml/maps/hdf5.py +++ b/nwb_linkml/src/nwb_linkml/maps/hdf5.py @@ -23,7 +23,6 @@ from pydantic import BaseModel, ConfigDict, Field from nwb_linkml.annotations import unwrap_optional from nwb_linkml.maps import Map -from nwb_linkml.maps.hdmf import dynamictable_to_model from nwb_linkml.types.hdf5 import HDF5_Path if sys.version_info.minor >= 11: @@ -234,63 +233,64 @@ class PruneEmpty(HDF5Map): return H5ReadResult.model_construct(path=src.path, source=src, completed=True) -class ResolveDynamicTable(HDF5Map): - """ - Handle loading a dynamic table! - - Dynamic tables are sort of odd in that their models don't include their fields - (except as a list of strings in ``colnames`` ), - so we need to create a new model that includes fields for each column, - and then we include the datasets as :class:`~numpydantic.interface.hdf5.H5ArrayPath` - objects which lazy load the arrays in a thread/process safe way. 
- - This map also resolves the child elements, - indicating so by the ``completes`` field in the :class:`.ReadResult` - """ - - phase = ReadPhases.read - priority = 1 - - @classmethod - def check( - cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult] - ) -> bool: - if src.h5_type == "dataset": - return False - if "neurodata_type" in src.attrs: - if src.attrs["neurodata_type"] == "DynamicTable": - return True - # otherwise, see if it's a subclass - model = provider.get_class(src.attrs["namespace"], src.attrs["neurodata_type"]) - # just inspect the MRO as strings rather than trying to check subclasses because - # we might replace DynamicTable in the future, and there isn't a stable DynamicTable - # class to inherit from anyway because of the whole multiple versions thing - parents = [parent.__name__ for parent in model.__mro__] - return "DynamicTable" in parents - else: - return False - - @classmethod - def apply( - cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult] - ) -> H5ReadResult: - with h5py.File(src.h5f_path, "r") as h5f: - obj = h5f.get(src.path) - - # make a populated model :) - base_model = provider.get_class(src.namespace, src.neurodata_type) - model = dynamictable_to_model(obj, base=base_model) - - completes = [HDF5_Path(child.name) for child in obj.values()] - - return H5ReadResult( - path=src.path, - source=src, - result=model, - completes=completes, - completed=True, - applied=["ResolveDynamicTable"], - ) +# +# class ResolveDynamicTable(HDF5Map): +# """ +# Handle loading a dynamic table! +# +# Dynamic tables are sort of odd in that their models don't include their fields +# (except as a list of strings in ``colnames`` ), +# so we need to create a new model that includes fields for each column, +# and then we include the datasets as :class:`~numpydantic.interface.hdf5.H5ArrayPath` +# objects which lazy load the arrays in a thread/process safe way. 
+# +# This map also resolves the child elements, +# indicating so by the ``completes`` field in the :class:`.ReadResult` +# """ +# +# phase = ReadPhases.read +# priority = 1 +# +# @classmethod +# def check( +# cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult] +# ) -> bool: +# if src.h5_type == "dataset": +# return False +# if "neurodata_type" in src.attrs: +# if src.attrs["neurodata_type"] == "DynamicTable": +# return True +# # otherwise, see if it's a subclass +# model = provider.get_class(src.attrs["namespace"], src.attrs["neurodata_type"]) +# # just inspect the MRO as strings rather than trying to check subclasses because +# # we might replace DynamicTable in the future, and there isn't a stable DynamicTable +# # class to inherit from anyway because of the whole multiple versions thing +# parents = [parent.__name__ for parent in model.__mro__] +# return "DynamicTable" in parents +# else: +# return False +# +# @classmethod +# def apply( +# cls, src: H5SourceItem, provider: "SchemaProvider", completed: Dict[str, H5ReadResult] +# ) -> H5ReadResult: +# with h5py.File(src.h5f_path, "r") as h5f: +# obj = h5f.get(src.path) +# +# # make a populated model :) +# base_model = provider.get_class(src.namespace, src.neurodata_type) +# model = dynamictable_to_model(obj, base=base_model) +# +# completes = [HDF5_Path(child.name) for child in obj.values()] +# +# return H5ReadResult( +# path=src.path, +# source=src, +# result=model, +# completes=completes, +# completed=True, +# applied=["ResolveDynamicTable"], +# ) class ResolveModelGroup(HDF5Map): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index d4ab5b4..e6e364b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -1,12 +1,7 @@ from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container +from ...hdmf_common.v1_8_0.hdmf_common_base import Data from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from typing import Any, ClassVar, List, Dict, Optional, Union, overload, Tuple from pydantic import ( BaseModel, ConfigDict, @@ -282,7 +277,7 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and val.shape[0] > 1: # special case where we are returning a row in a ragged array, # same as above - prevent pandas pivoting to long val = Series([val]) diff --git a/nwb_linkml/tests/test_maps/test_hdmf.py b/nwb_linkml/tests/test_maps/test_hdmf.py deleted file mode 100644 index b6b66dc..0000000 --- a/nwb_linkml/tests/test_maps/test_hdmf.py +++ /dev/null @@ -1,24 +0,0 @@ -import time - -import h5py -import pytest - -from nwb_linkml.maps.hdmf import dynamictable_to_model, model_from_dynamictable - -NWBFILE = "/Users/jonny/Dropbox/lab/p2p_ld/data/nwb/sub-738651046_ses-760693773.nwb" - - -@pytest.mark.xfail() -@pytest.mark.parametrize("dataset", ["aibs.nwb"]) -def 
test_make_dynamictable(data_dir, dataset): - nwbfile = data_dir / dataset - h5f = h5py.File(nwbfile, "r") - group = h5f["units"] - - start_time = time.time() - model = model_from_dynamictable(group) - data = dynamictable_to_model(group, model) - - _ = data.model_dump_json() - end_time = time.time() - total_time = end_time - start_time From a993ee10f2f9da3e79e90d6da268d1a449fb431c Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Tue, 6 Aug 2024 21:40:23 -0700 Subject: [PATCH 28/61] continue removing nptyping, actually fix indexing --- docs/meta/todo.md | 2 +- docs/pdm.lock | 1068 +++++++---------- docs/pyproject.toml | 1 - .../src/nwb_linkml/adapters/namespaces.py | 5 +- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 26 +- nwb_linkml/src/nwb_linkml/io/hdf5.py | 5 +- nwb_linkml/src/nwb_linkml/maps/dtype.py | 3 +- .../hdmf_common/v1_1_0/hdmf_common_table.py | 16 +- .../hdmf_common/v1_1_2/hdmf_common_table.py | 16 +- .../hdmf_common/v1_1_3/hdmf_common_table.py | 16 +- .../hdmf_common/v1_2_0/hdmf_common_table.py | 16 +- .../hdmf_common/v1_2_1/hdmf_common_table.py | 16 +- .../hdmf_common/v1_3_0/hdmf_common_table.py | 16 +- .../hdmf_common/v1_4_0/hdmf_common_table.py | 16 +- .../hdmf_common/v1_5_0/hdmf_common_table.py | 16 +- .../hdmf_common/v1_5_1/hdmf_common_table.py | 16 +- .../hdmf_common/v1_6_0/hdmf_common_table.py | 16 +- .../hdmf_common/v1_7_0/hdmf_common_table.py | 16 +- .../hdmf_common/v1_8_0/hdmf_common_table.py | 25 +- nwb_linkml/src/nwb_linkml/types/df.py | 335 +++--- nwb_linkml/tests/test_includes/test_hdmf.py | 16 +- .../test_providers/test_provider_schema.py | 7 +- pyproject.toml | 4 + scripts/generate_core.py | 3 + 24 files changed, 863 insertions(+), 813 deletions(-) diff --git a/docs/meta/todo.md b/docs/meta/todo.md index 92355cf..d2bf9ac 100644 --- a/docs/meta/todo.md +++ b/docs/meta/todo.md @@ -23,7 +23,7 @@ Cleanup - [ ] Make a minimal pydanticgen-only package to slim linkml deps? - [ ] Disambiguate "maps" terminology - split out simple maps from the eg. 
dataset mapping classes -- [ ] Remove unnecessary imports +- [x] Remove unnecessary imports - dask - nptyping - [ ] Adapt the split generation to the new split generator style diff --git a/docs/pdm.lock b/docs/pdm.lock index db4920e..0dcaca2 100644 --- a/docs/pdm.lock +++ b/docs/pdm.lock @@ -3,9 +3,12 @@ [metadata] groups = ["default"] -strategy = ["cross_platform", "inherit_metadata"] -lock_version = "4.4.2" -content_hash = "sha256:42dbf7249d28b6d1ad06ccd4593eac866228ad47d884ea8f4b118d5331da2aa0" +strategy = ["inherit_metadata"] +lock_version = "4.5.0" +content_hash = "sha256:6d950420dfea0b5a96435adc77fb4ee411fcada547a1c55e6f1dc74f6ba6b199" + +[[metadata.targets]] +requires_python = ">=3.10,<3.13" [[package]] name = "alabaster" @@ -24,6 +27,9 @@ version = "0.7.0" requires_python = ">=3.8" summary = "Reusable constraint types to use with typing.Annotated" groups = ["default"] +dependencies = [ + "typing-extensions>=4.0.0; python_version < \"3.9\"", +] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -34,6 +40,9 @@ name = "antlr4-python3-runtime" version = "4.9.3" summary = "ANTLR 4.9.3 runtime for Python 3.7" groups = ["default"] +dependencies = [ + "typing; python_version < \"3.5\"", +] files = [ {file = "antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b"}, ] @@ -89,6 +98,7 @@ summary = "Annotate AST trees with source code positions" groups = ["default"] dependencies = [ "six>=1.12.0", + "typing; python_version < \"3.5\"", ] files = [ {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, @@ -97,13 +107,16 @@ files = [ [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" requires_python = ">=3.7" summary = "Classes Without Boilerplate" groups = ["default"] +dependencies = [ + "importlib-metadata; python_version < \"3.8\"", +] files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [[package]] @@ -114,6 +127,7 @@ summary = "Seamlessly integrate pydantic models in your Sphinx documentation." 
groups = ["default"] dependencies = [ "Sphinx>=4.0", + "importlib-metadata>1; python_version <= \"3.8\"", "pydantic-settings<3.0.0,>=2.0", "pydantic<3.0.0,>=2.0", ] @@ -127,6 +141,9 @@ version = "2.15.0" requires_python = ">=3.8" summary = "Internationalization utilities" groups = ["default"] +dependencies = [ + "pytz>=2015.7; python_version < \"3.9\"", +] files = [ {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, @@ -147,41 +164,35 @@ files = [ ] [[package]] -name = "blosc2" -version = "2.7.0" -requires_python = "<4,>=3.10" -summary = "Python wrapper for the C-Blosc2 library" +name = "black" +version = "24.8.0" +requires_python = ">=3.8" +summary = "The uncompromising code formatter." groups = ["default"] dependencies = [ - "msgpack", - "ndindex>=1.4", - "numexpr", - "numpy>=1.20.3", - "py-cpuinfo", + "click>=8.0.0", + "mypy-extensions>=0.4.3", + "packaging>=22.0", + "pathspec>=0.9.0", + "platformdirs>=2", + "tomli>=1.1.0; python_version < \"3.11\"", + "typing-extensions>=4.0.1; python_version < \"3.11\"", ] files = [ - {file = "blosc2-2.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aa71042277956199676169335eb64aa76e33adac5a22289eccdb7d10edf402b6"}, - {file = "blosc2-2.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:18e3c4c95fe40ea9cda88c784d96e4efc8ddf53f94074cf46daa2e91c9ae5137"}, - {file = "blosc2-2.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ac66ce25214b0b2e53beda9bc6f333dba16f2667649b1026ae041511b5a07d"}, - {file = "blosc2-2.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:928a89851b8528ce9c233048d832be5b2fef47645d5a389c021f3f58333fa3f8"}, - {file = "blosc2-2.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a9518b7bbaa0f9903a5a921abe6abb0faa56b0e0ad2da0416ff3a486a4b2e0aa"}, - {file = "blosc2-2.7.0-cp310-cp310-win32.whl", hash = "sha256:488dc4be3b6894967a7189952634644f8da46c4bab7734719d379cdf5b440dc0"}, - {file = "blosc2-2.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:17dd39f62f1686a170232ac8bcba40358ef67e919a91fe840ac71a45d067df30"}, - {file = "blosc2-2.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:565701ad336946a7ef12250def97aae2257de1da34ac8cd570be91b664a03d30"}, - {file = "blosc2-2.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b640fe2d1d39af2dccffe5e100ef94d21940bfb7f0af44ba17fef718671b267"}, - {file = "blosc2-2.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:634bc22f17ae47a166b8201c77ba11bc160d9997ace51fc820cb3cbd285d47f8"}, - {file = "blosc2-2.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d4b208d5f5947d3062d3353717c43e0ea8e6ccdecdcd30737d5305628e0062b"}, - {file = "blosc2-2.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fd3ca9a61bce4e4dc8006b613fa9dd8982f71e01fa9f593d6cc44d9fdbb56174"}, - {file = "blosc2-2.7.0-cp311-cp311-win32.whl", hash = "sha256:4518944374880d822f9ca90d4473bfa9f4d884b462f78365e224c2b291962e44"}, - {file = "blosc2-2.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:05d40ede9cf0ecb25500cfe9bebe190e75f246eb1fcd7bd358ac1acfef44ee7a"}, - {file = "blosc2-2.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:729305b06e76b0c95b0ea5090aa7ec87eff72ca43e194283e0cccee92bbdd1e6"}, - {file = "blosc2-2.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:64a26c9f7a4a5ddc5721a75b37f913f9e21c0dab96d8c152a64f8faf8659e9ee"}, - {file = "blosc2-2.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:770733ce68d82674d1f80961fe56f3c2d914d8ea4de036af3888a22479add97d"}, - {file = "blosc2-2.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6a700f9324b37e814c5633c43b081c60962f4dd59c0340cefe5f61f9f0411fd"}, - {file = "blosc2-2.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1546c04d25ce793fa0fd7a83999bbb576ff84ef474fb45801f0b6dd76b84803c"}, - {file = "blosc2-2.7.0-cp312-cp312-win32.whl", hash = "sha256:407896867032a760dcce6c25d5e5a56b6fe5235245e065e2549697f69b5117c6"}, - {file = "blosc2-2.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:62d2a6eaf1be1858993a4d7b2b8efd2ede5c4eaabe030c611cd075d907aa5400"}, - {file = "blosc2-2.7.0.tar.gz", hash = "sha256:9b982c1d40560eefb4a01d67c57e786d39a5ee9696f3deadd32ebf5f8885eb2a"}, + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, ] [[package]] @@ -197,7 +208,7 @@ files = [ [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.0" requires_python = ">=3.8" summary = "Foreign Function Interface for Python calling C code." 
groups = ["default"] @@ -206,39 +217,42 @@ dependencies = [ "pycparser", ] files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = 
"cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, + {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, + {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, + {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, + {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, + {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, + {file = 
"cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, + {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, + {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, + {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, + {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, + {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, + {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, + {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, + {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, + {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, + {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, + {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, ] [[package]] @@ -328,23 +342,13 @@ summary = "Composable command line interface toolkit" groups = ["default"] dependencies = [ "colorama; platform_system == \"Windows\"", + "importlib-metadata; python_version < \"3.8\"", ] files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] -[[package]] -name = "cloudpickle" -version = "3.0.0" -requires_python = ">=3.8" -summary = "Pickler class to extend the standard pickle.Pickler functionality" -groups = ["default"] -files = [ - {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"}, - {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"}, -] - [[package]] name = "colorama" version = "0.4.6" @@ -372,7 +376,7 @@ files = [ [[package]] name = "curies" -version = "0.7.9" +version = "0.7.10" requires_python = ">=3.8" summary = "Idiomatic conversion between URIs and compact URIs (CURIEs)." groups = ["default"] @@ -382,52 +386,31 @@ dependencies = [ "requests", ] files = [ - {file = "curies-0.7.9-py3-none-any.whl", hash = "sha256:e4c5beb91642376953c94db0ee2fb5d2b011c3b16749516436114ba61442f260"}, - {file = "curies-0.7.9.tar.gz", hash = "sha256:3b63c5fea7b0e967629a3a384b1a8c59b56c503487c1dcbacddeab59e25db4d8"}, -] - -[[package]] -name = "dask" -version = "2024.7.0" -requires_python = ">=3.9" -summary = "Parallel PyData with Task Scheduling" -groups = ["default"] -dependencies = [ - "click>=8.1", - "cloudpickle>=1.5.0", - "fsspec>=2021.09.0", - "importlib-metadata>=4.13.0; python_version < \"3.12\"", - "packaging>=20.0", - "partd>=1.4.0", - "pyyaml>=5.3.1", - "toolz>=0.10.0", -] -files = [ - {file = "dask-2024.7.0-py3-none-any.whl", hash = "sha256:0f30f218a1fe1c8e9a6ba8add1207088ba9ff049098d4ea4ce045fd5ff7ca914"}, - {file = "dask-2024.7.0.tar.gz", hash = "sha256:0060bae9a58b5b3ce7e0d97040e903b4d3db09ba49222101cfc40f9834a8a6bc"}, + {file = "curies-0.7.10-py3-none-any.whl", hash = "sha256:ad80f420dd76b6f3e921a245370ff6ab7473c48c29c17254970c03cd2e58af5f"}, + {file = "curies-0.7.10.tar.gz", hash = "sha256:98a7ceb94710fab3a02727a7f85ba0719dd22be5fc8b5f2ad1d7d4cfc47d64ce"}, ] [[package]] name = "debugpy" -version = "1.8.2" +version = "1.8.5" requires_python = ">=3.8" summary = "An implementation of the Debug Adapter Protocol for Python" groups = ["default"] files = [ - {file = "debugpy-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7ee2e1afbf44b138c005e4380097d92532e1001580853a7cb40ed84e0ef1c3d2"}, - {file = "debugpy-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f8c3f7c53130a070f0fc845a0f2cee8ed88d220d6b04595897b66605df1edd6"}, - {file = "debugpy-1.8.2-cp310-cp310-win32.whl", hash = "sha256:f179af1e1bd4c88b0b9f0fa153569b24f6b6f3de33f94703336363ae62f4bf47"}, - {file = "debugpy-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:0600faef1d0b8d0e85c816b8bb0cb90ed94fc611f308d5fde28cb8b3d2ff0fe3"}, - {file = "debugpy-1.8.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8a13417ccd5978a642e91fb79b871baded925d4fadd4dfafec1928196292aa0a"}, - {file = 
"debugpy-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acdf39855f65c48ac9667b2801234fc64d46778021efac2de7e50907ab90c634"}, - {file = "debugpy-1.8.2-cp311-cp311-win32.whl", hash = "sha256:2cbd4d9a2fc5e7f583ff9bf11f3b7d78dfda8401e8bb6856ad1ed190be4281ad"}, - {file = "debugpy-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:d3408fddd76414034c02880e891ea434e9a9cf3a69842098ef92f6e809d09afa"}, - {file = "debugpy-1.8.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:5d3ccd39e4021f2eb86b8d748a96c766058b39443c1f18b2dc52c10ac2757835"}, - {file = "debugpy-1.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62658aefe289598680193ff655ff3940e2a601765259b123dc7f89c0239b8cd3"}, - {file = "debugpy-1.8.2-cp312-cp312-win32.whl", hash = "sha256:bd11fe35d6fd3431f1546d94121322c0ac572e1bfb1f6be0e9b8655fb4ea941e"}, - {file = "debugpy-1.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:15bc2f4b0f5e99bf86c162c91a74c0631dbd9cef3c6a1d1329c946586255e859"}, - {file = "debugpy-1.8.2-py2.py3-none-any.whl", hash = "sha256:16e16df3a98a35c63c3ab1e4d19be4cbc7fdda92d9ddc059294f18910928e0ca"}, - {file = "debugpy-1.8.2.zip", hash = "sha256:95378ed08ed2089221896b9b3a8d021e642c24edc8fef20e5d4342ca8be65c00"}, + {file = "debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7"}, + {file = "debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a"}, + {file = "debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed"}, + {file = "debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e"}, + {file = "debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a"}, + {file = "debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b"}, + {file = "debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408"}, + {file = "debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3"}, + {file = "debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156"}, + {file = "debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb"}, + {file = "debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7"}, + {file = "debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c"}, + {file = "debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44"}, + {file = "debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0"}, ] [[package]] @@ -479,14 +462,14 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" requires_python = ">=3.7" summary = "Backport of PEP 654 (exception groups)" groups = ["default"] 
marker = "python_version < \"3.11\"" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [[package]] @@ -516,25 +499,17 @@ version = "1.5.1" requires_python = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" summary = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" groups = ["default"] +dependencies = [ + "cached-property>=1.3.0; python_version < \"3.8\"", +] files = [ {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, ] -[[package]] -name = "fsspec" -version = "2024.6.1" -requires_python = ">=3.8" -summary = "File-system specification" -groups = ["default"] -files = [ - {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, - {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, -] - [[package]] name = "furo" -version = "2024.5.6" +version = "2024.8.6" requires_python = ">=3.8" summary = "A clean customisable Sphinx documentation theme." groups = ["default"] @@ -542,11 +517,11 @@ dependencies = [ "beautifulsoup4", "pygments>=2.7", "sphinx-basic-ng>=1.0.0.beta2", - "sphinx<8.0,>=6.0", + "sphinx<9.0,>=6.0", ] files = [ - {file = "furo-2024.5.6-py3-none-any.whl", hash = "sha256:490a00d08c0a37ecc90de03ae9227e8eb5d6f7f750edf9807f398a2bdf2358de"}, - {file = "furo-2024.5.6.tar.gz", hash = "sha256:81f205a6605ebccbb883350432b4831c0196dd3d1bc92f61e1f459045b3d2b0b"}, + {file = "furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c"}, + {file = "furo-2024.8.6.tar.gz", hash = "sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01"}, ] [[package]] @@ -604,6 +579,9 @@ version = "0.14.0" requires_python = ">=3.7" summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" groups = ["default"] +dependencies = [ + "typing-extensions; python_version < \"3.8\"", +] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -669,16 +647,17 @@ files = [ [[package]] name = "importlib-metadata" -version = "8.0.0" +version = "8.2.0" requires_python = ">=3.8" summary = "Read metadata from Python packages" groups = ["default"] dependencies = [ + "typing-extensions>=3.6.4; python_version < \"3.8\"", "zipp>=0.5", ] files = [ - {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, - {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, + {file = "importlib_metadata-8.2.0-py3-none-any.whl", hash = 
"sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369"}, + {file = "importlib_metadata-8.2.0.tar.gz", hash = "sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d"}, ] [[package]] @@ -900,7 +879,9 @@ summary = "An implementation of JSON Schema validation for Python" groups = ["default"] dependencies = [ "attrs>=22.2.0", + "importlib-resources>=1.4.0; python_version < \"3.9\"", "jsonschema-specifications>=2023.03.6", + "pkgutil-resolve-name>=1.3.10; python_version < \"3.9\"", "referencing>=0.28.4", "rpds-py>=0.7.1", ] @@ -916,6 +897,7 @@ requires_python = ">=3.8" summary = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" groups = ["default"] dependencies = [ + "importlib-resources>=1.4.0; python_version < \"3.9\"", "referencing>=0.31.0", ] files = [ @@ -974,6 +956,7 @@ requires_python = ">=3.8" summary = "Jupyter protocol implementation and client libraries" groups = ["default"] dependencies = [ + "importlib-metadata>=4.8.3; python_version < \"3.10\"", "jupyter-core!=5.0.*,>=4.12", "python-dateutil>=2.8.2", "pyzmq>=23.0", @@ -1017,8 +1000,8 @@ name = "linkml" version = "0.0.0" requires_python = "<4.0.0,>=3.8.1" git = "https://github.com/sneakers-the-rat/linkml" -ref = "arrays-numpydantic" -revision = "b70daae67170c5a5e321b2aa24a2db4237c87e4f" +ref = "nwb-linkml" +revision = "0a6578bff4713688260f64b3076b197bd6decce9" summary = "Linked Open Data Modeling Language" groups = ["default"] dependencies = [ @@ -1045,6 +1028,7 @@ dependencies = [ "rdflib>=6.0.0", "requests>=2.22", "sqlalchemy>=1.4.31", + "typing-extensions>=4.4.0; python_version < \"3.9\"", "watchdog>=0.9.0", ] @@ -1092,17 +1076,6 @@ files = [ {file = "linkml_runtime-1.8.0.tar.gz", hash = "sha256:436381a7bf791e9af4ef0a5adcac86762d451b77670fbdb3ba083d2c177fb5f2"}, ] -[[package]] -name = "locket" -version = "1.0.0" -requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -summary = "File-based locks for Python on Linux and Windows" -groups = ["default"] -files = [ - {file = "locket-1.0.0-py2.py3-none-any.whl", hash = "sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3"}, - {file = "locket-1.0.0.tar.gz", hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632"}, -] - [[package]] name = "markdown-it-py" version = "3.0.0" @@ -1197,46 +1170,14 @@ files = [ ] [[package]] -name = "msgpack" -version = "1.0.8" -requires_python = ">=3.8" -summary = "MessagePack serializer" +name = "mypy-extensions" +version = "1.0.0" +requires_python = ">=3.5" +summary = "Type system extensions for programs checked with the mypy type checker." 
groups = ["default"] files = [ - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, - {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, - {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, - {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, - {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, - 
{file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, - {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, - {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, - {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] @@ -1262,21 +1203,21 @@ dependencies = [ [[package]] name = "myst-parser" -version = "3.0.1" -requires_python = ">=3.8" +version = "4.0.0" +requires_python = ">=3.10" summary = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," groups = ["default"] dependencies = [ - "docutils<0.22,>=0.18", + "docutils<0.22,>=0.19", "jinja2", "markdown-it-py~=3.0", - "mdit-py-plugins~=0.4", + "mdit-py-plugins>=0.4.1,~=0.4", "pyyaml", - "sphinx<8,>=6", + "sphinx<9,>=7", ] files = [ - {file = "myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1"}, - {file = "myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87"}, + {file = "myst_parser-4.0.0-py3-none-any.whl", hash = "sha256:b9317997552424448c6096c2558872fdb6f81d3ecb3a40ce84a7518798f3f28d"}, + {file = "myst_parser-4.0.0.tar.gz", hash = "sha256:851c9dfb44e36e56d15d05e72f02b80da21a9e0d07cba96baf5e2d476bb91531"}, ] [[package]] @@ -1313,17 +1254,6 @@ files = [ {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, ] -[[package]] -name = "ndindex" -version = "1.8" -requires_python = ">=3.8" -summary = "A Python library for manipulating indices of ndarrays." 
-groups = ["default"] -files = [ - {file = "ndindex-1.8-py3-none-any.whl", hash = "sha256:b5132cd331f3e4106913ed1a974a3e355967a5991543c2f512b40cb8bb9f50b8"}, - {file = "ndindex-1.8.tar.gz", hash = "sha256:5fc87ebc784605f01dd5367374cb40e8da8f2c30988968990066c5098a7eebe8"}, -] - [[package]] name = "nest-asyncio" version = "1.6.0" @@ -1335,105 +1265,60 @@ files = [ {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, ] -[[package]] -name = "nptyping" -version = "2.5.0" -requires_python = ">=3.7" -summary = "Type hints for NumPy." -groups = ["default"] -dependencies = [ - "numpy<2.0.0,>=1.20.0; python_version >= \"3.8\"", -] -files = [ - {file = "nptyping-2.5.0-py3-none-any.whl", hash = "sha256:764e51836faae33a7ae2e928af574cfb701355647accadcc89f2ad793630b7c8"}, - {file = "nptyping-2.5.0.tar.gz", hash = "sha256:e3d35b53af967e6fb407c3016ff9abae954d3a0568f7cc13a461084224e8e20a"}, -] - -[[package]] -name = "numexpr" -version = "2.10.1" -requires_python = ">=3.9" -summary = "Fast numerical expression evaluator for NumPy" -groups = ["default"] -dependencies = [ - "numpy>=1.23.0", -] -files = [ - {file = "numexpr-2.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bbd35f17f6efc00ebd4a480192af1ee30996094a0d5343b131b0e90e61e8b554"}, - {file = "numexpr-2.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fecdf4bf3c1250e56583db0a4a80382a259ba4c2e1efa13e04ed43f0938071f5"}, - {file = "numexpr-2.10.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2efa499f460124538a5b4f1bf2e77b28eb443ee244cc5573ed0f6a069ebc635"}, - {file = "numexpr-2.10.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac23a72eff10f928f23b147bdeb0f1b774e862abe332fc9bf4837e9f1bc0bbf9"}, - {file = "numexpr-2.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b28eaf45f1cc1048aad9e90e3a8ada1aef58c5f8155a85267dc781b37998c046"}, - {file = "numexpr-2.10.1-cp310-cp310-win32.whl", hash = "sha256:4f0985bd1c493b23b5aad7d81fa174798f3812efb78d14844194834c9fee38b8"}, - {file = "numexpr-2.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:44f6d12a8c44be90199bbb10d3abf467f88951f48a3d1fbbd3c219d121f39c9d"}, - {file = "numexpr-2.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3c0b0bf165b2d886eb981afa4e77873ca076f5d51c491c4d7b8fc10f17c876f"}, - {file = "numexpr-2.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56648a04679063175681195670ad53e5c8ca19668166ed13875199b5600089c7"}, - {file = "numexpr-2.10.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce04ae6efe2a9d0be1a0e114115c3ae70c68b8b8fbc615c5c55c15704b01e6a4"}, - {file = "numexpr-2.10.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:45f598182b4f5c153222e47d5163c3bee8d5ebcaee7e56dd2a5898d4d97e4473"}, - {file = "numexpr-2.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6a50370bea77ba94c3734a44781c716751354c6bfda2d369af3aed3d67d42871"}, - {file = "numexpr-2.10.1-cp311-cp311-win32.whl", hash = "sha256:fa4009d84a8e6e21790e718a80a22d57fe7f215283576ef2adc4183f7247f3c7"}, - {file = "numexpr-2.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:fcbf013bb8494e8ef1d11fa3457827c1571c6a3153982d709e5d17594999d4dd"}, - {file = "numexpr-2.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82fc95c301b15ff4823f98989ee363a2d5555d16a7cfd3710e98ddee726eaaaa"}, - {file = "numexpr-2.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:cbf79fef834f88607f977ab9867061dcd9b40ccb08bb28547c6dc6c73e560895"}, - {file = "numexpr-2.10.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:552c8d4b2e3b87cdb2abb40a781b9a61a9090a9f66ac7357fc5a0b93aff76be3"}, - {file = "numexpr-2.10.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22cc65e9121aeb3187a2b50827715b2b087ea70e8ab21416ea52662322087b43"}, - {file = "numexpr-2.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:00204e5853713b5eba5f3d0bc586a5d8d07f76011b597c8b4087592cc2ec2928"}, - {file = "numexpr-2.10.1-cp312-cp312-win32.whl", hash = "sha256:82bf04a1495ac475de4ab49fbe0a3a2710ed3fd1a00bc03847316b5d7602402d"}, - {file = "numexpr-2.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:300e577b3c006dd7a8270f1bb2e8a00ee15bf235b1650fe2a6febec2954bc2c3"}, - {file = "numexpr-2.10.1.tar.gz", hash = "sha256:9bba99d354a65f1a008ab8b87f07d84404c668e66bab624df5b6b5373403cf81"}, -] - [[package]] name = "numpy" -version = "1.26.4" +version = "2.0.1" requires_python = ">=3.9" summary = "Fundamental package for array computing in Python" groups = ["default"] files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = 
"numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fbb536eac80e27a2793ffd787895242b7f18ef792563d742c2d673bfcb75134"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69ff563d43c69b1baba77af455dd0a839df8d25e8590e79c90fcbe1499ebde42"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:1b902ce0e0a5bb7704556a217c4f63a7974f8f43e090aff03fcf262e0b135e02"}, + {file = "numpy-2.0.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:f1659887361a7151f89e79b276ed8dff3d75877df906328f14d8bb40bb4f5101"}, + {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4658c398d65d1b25e1760de3157011a80375da861709abd7cef3bad65d6543f9"}, + {file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4127d4303b9ac9f94ca0441138acead39928938660ca58329fe156f84b9f3015"}, + {file = "numpy-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e5eeca8067ad04bc8a2a8731183d51d7cbaac66d86085d5f4766ee6bf19c7f87"}, + {file = "numpy-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9adbd9bb520c866e1bfd7e10e1880a1f7749f1f6e5017686a5fbb9b72cf69f82"}, + {file = "numpy-2.0.1-cp310-cp310-win32.whl", hash = "sha256:7b9853803278db3bdcc6cd5beca37815b133e9e77ff3d4733c247414e78eb8d1"}, + {file = "numpy-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:81b0893a39bc5b865b8bf89e9ad7807e16717f19868e9d234bdaf9b1f1393868"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75b4e316c5902d8163ef9d423b1c3f2f6252226d1aa5cd8a0a03a7d01ffc6268"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6e4eeb6eb2fced786e32e6d8df9e755ce5be920d17f7ce00bc38fcde8ccdbf9e"}, + {file = 
"numpy-2.0.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a1e01dcaab205fbece13c1410253a9eea1b1c9b61d237b6fa59bcc46e8e89343"}, + {file = "numpy-2.0.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8fc2de81ad835d999113ddf87d1ea2b0f4704cbd947c948d2f5513deafe5a7b"}, + {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a3d94942c331dd4e0e1147f7a8699a4aa47dffc11bf8a1523c12af8b2e91bbe"}, + {file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15eb4eca47d36ec3f78cde0a3a2ee24cf05ca7396ef808dda2c0ddad7c2bde67"}, + {file = "numpy-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b83e16a5511d1b1f8a88cbabb1a6f6a499f82c062a4251892d9ad5d609863fb7"}, + {file = "numpy-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f87fec1f9bc1efd23f4227becff04bd0e979e23ca50cc92ec88b38489db3b55"}, + {file = "numpy-2.0.1-cp311-cp311-win32.whl", hash = "sha256:36d3a9405fd7c511804dc56fc32974fa5533bdeb3cd1604d6b8ff1d292b819c4"}, + {file = "numpy-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:08458fbf403bff5e2b45f08eda195d4b0c9b35682311da5a5a0a0925b11b9bd8"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bf4e6f4a2a2e26655717a1983ef6324f2664d7011f6ef7482e8c0b3d51e82ac"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6fddc5fe258d3328cd8e3d7d3e02234c5d70e01ebe377a6ab92adb14039cb4"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5daab361be6ddeb299a918a7c0864fa8618af66019138263247af405018b04e1"}, + {file = "numpy-2.0.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:ea2326a4dca88e4a274ba3a4405eb6c6467d3ffbd8c7d38632502eaae3820587"}, + {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529af13c5f4b7a932fb0e1911d3a75da204eff023ee5e0e79c1751564221a5c8"}, + {file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6790654cb13eab303d8402354fabd47472b24635700f631f041bd0b65e37298a"}, + {file = "numpy-2.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbab9fc9c391700e3e1287666dfd82d8666d10e69a6c4a09ab97574c0b7ee0a7"}, + {file = "numpy-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d0d92a5e3613c33a5f01db206a33f8fdf3d71f2912b0de1739894668b7a93b"}, + {file = "numpy-2.0.1-cp312-cp312-win32.whl", hash = "sha256:173a00b9995f73b79eb0191129f2455f1e34c203f559dd118636858cc452a1bf"}, + {file = "numpy-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:bb2124fdc6e62baae159ebcfa368708867eb56806804d005860b6007388df171"}, + {file = "numpy-2.0.1.tar.gz", hash = "sha256:485b87235796410c3519a699cfe1faab097e509e90ebb05dcd098db2ae87e7b3"}, ] [[package]] name = "numpydantic" -version = "1.2.1" +version = "1.3.0" requires_python = "<4.0,>=3.9" summary = "Type and shape validation and serialization for numpy arrays in pydantic models" groups = ["default"] dependencies = [ - "nptyping>=2.5.0", "numpy>=1.24.0", "pydantic>=2.3.0", + "typing-extensions>=4.11.0; python_version < \"3.11\"", ] files = [ - {file = "numpydantic-1.2.1-py3-none-any.whl", hash = "sha256:e21d7e272410b3a2013d2a6aeec2ed6efd13ea171b0200e2029d7c2f1453def0"}, - {file = "numpydantic-1.2.1.tar.gz", hash = "sha256:d8a3e7371d78b99fa4a4733a5b873046f064993431ae63f97edcf9bda4dd5c7f"}, + {file = "numpydantic-1.3.0-py3-none-any.whl", hash = "sha256:bda3aa2cd858e9211006be8b8e589e1905b2c6a2db17cec0c28563ba1ad66b68"}, + {file = "numpydantic-1.3.0.tar.gz", hash = 
"sha256:b3931d51ba7e22d48bdd2ae56cad368f63db99ef74e8570021a7fd176b2ffc1f"}, ] [[package]] @@ -1444,14 +1329,13 @@ path = "../nwb_linkml" summary = "Translating NWB schema language to LinkML" groups = ["default"] dependencies = [ - "blosc2>=2.2.7", - "dask>=2023.9.2", + "black>=24.4.2", "h5py>=3.9.0", - "linkml @ git+https://github.com/sneakers-the-rat/linkml@arrays-numpydantic", + "linkml @ git+https://github.com/sneakers-the-rat/linkml@nwb-linkml", "linkml-runtime>=1.7.7", - "nptyping>=2.5.0", - "numpydantic>=1.2.1", + "numpydantic>=1.3.0", "nwb-schema-language>=0.1.3", + "pandas>=2.2.2", "pydantic-settings>=2.0.3", "pydantic>=2.3.0", "pyyaml>=6.0", @@ -1497,6 +1381,45 @@ files = [ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] +[[package]] +name = "pandas" +version = "2.2.2" +requires_python = ">=3.9" +summary = "Powerful data structures for data analysis, time series, and statistics" +groups = ["default"] +dependencies = [ + "numpy>=1.22.4; python_version < \"3.11\"", + "numpy>=1.23.2; python_version == \"3.11\"", + "numpy>=1.26.0; python_version >= \"3.12\"", + "python-dateutil>=2.8.2", + "pytz>=2020.1", + "tzdata>=2022.7", +] +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + [[package]] name = "parse" version = "1.20.2" @@ -1519,18 +1442,14 @@ files = [ ] [[package]] -name = "partd" -version = "1.4.2" -requires_python = ">=3.9" -summary = "Appendable key-value storage" +name = "pathspec" +version = "0.12.1" +requires_python = ">=3.8" +summary = "Utility library for gitignore style pattern matching of file paths." groups = ["default"] -dependencies = [ - "locket", - "toolz", -] files = [ - {file = "partd-1.4.2-py3-none-any.whl", hash = "sha256:978e4ac767ec4ba5b86c6eaa52e5a2a3bc748a2ca839e8cc798f1cc6ce6efb0f"}, - {file = "partd-1.4.2.tar.gz", hash = "sha256:d022c33afbdc8405c226621b015e8067888173d85f7f5ecebb3cafed9a20f02c"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] @@ -1598,7 +1517,7 @@ files = [ [[package]] name = "prefixmaps" -version = "0.2.4" +version = "0.2.5" requires_python = "<4.0,>=3.8" summary = "A python library for retrieving semantic prefix maps" groups = ["default"] @@ -1607,8 +1526,8 @@ dependencies = [ "pyyaml>=5.3.1", ] files = [ - {file = "prefixmaps-0.2.4-py3-none-any.whl", hash = "sha256:89bf0e6fb08c276f754f9624c42adf2e87c64ee92a3dde1f7eff01f22d85b512"}, - {file = "prefixmaps-0.2.4.tar.gz", hash = "sha256:ae86a1b31189d0516d199756d5808f75f44b39e86546c356cc78c0fe8d2078af"}, + {file = "prefixmaps-0.2.5-py3-none-any.whl", hash = "sha256:68caa04b3a6a8e058aa1c55affe32c62e44b564d031d63f768e267b796a1f3ee"}, + {file = "prefixmaps-0.2.5.tar.gz", hash = "sha256:aaccd2425ade2ea97a502c58be49fe8f3536e3d5e919712ae0358a39fc800799"}, ] [[package]] @@ -1655,22 +1574,12 @@ files = [ [[package]] name = "pure-eval" -version = "0.2.2" +version = "0.2.3" summary = "Safely evaluate AST nodes without side effects" groups = ["default"] files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] - -[[package]] -name = "py-cpuinfo" -version = "9.0.0" -summary = "Get CPU info with pure Python" -groups = ["default"] -files = [ - {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, - {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, + {file = 
"pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, ] [[package]] @@ -1694,6 +1603,7 @@ groups = ["default"] dependencies = [ "annotated-types>=0.4.0", "pydantic-core==2.20.1", + "typing-extensions>=4.12.2; python_version >= \"3.13\"", "typing-extensions>=4.6.1; python_version < \"3.13\"", ] files = [ @@ -1755,20 +1665,12 @@ files = [ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, ] [[package]] name = "pydantic-settings" -version = "2.3.4" +version = "2.4.0" requires_python = ">=3.8" summary = "Settings management using Pydantic" groups = ["default"] @@ -1777,8 +1679,8 @@ dependencies = [ "python-dotenv>=0.21.0", ] files = [ - {file = "pydantic_settings-2.3.4-py3-none-any.whl", hash = "sha256:11ad8bacb68a045f00e4f862c7a718c8a9ec766aa8fd4c32e39a0594b207b53a"}, - {file = "pydantic_settings-2.3.4.tar.gz", hash = "sha256:c5802e3d62b78e82522319bbc9b8f8ffb28ad1c988a99311d04f2a6051fca0a7"}, + {file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"}, + {file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"}, ] [[package]] @@ -1860,7 +1762,7 @@ files = [ [[package]] name = "pytest" -version = "8.2.2" +version = "8.3.2" requires_python = ">=3.8" summary = "pytest: simple powerful testing with Python" groups = ["default"] @@ -1869,12 +1771,12 @@ dependencies = [ "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", "iniconfig", "packaging", - 
"pluggy<2.0,>=1.5", + "pluggy<2,>=1.5", "tomli>=1; python_version < \"3.11\"", ] files = [ - {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, - {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, + {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, + {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, ] [[package]] @@ -1927,6 +1829,16 @@ files = [ {file = "PyTrie-0.4.0.tar.gz", hash = "sha256:8f4488f402d3465993fb6b6efa09866849ed8cda7903b50647b7d0342b805379"}, ] +[[package]] +name = "pytz" +version = "2024.1" +summary = "World timezone definitions, modern and historical" +groups = ["default"] +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + [[package]] name = "pywin32" version = "306" @@ -1946,40 +1858,44 @@ files = [ [[package]] name = "pyyaml" -version = "6.0.1" -requires_python = ">=3.6" +version = "6.0.2" +requires_python = ">=3.8" summary = "YAML parser and emitter for Python" groups = ["default"] files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - 
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "pyzmq" -version = "26.0.3" +version = "26.1.0" requires_python = ">=3.7" summary = "Python bindings for 0MQ" groups = ["default"] @@ -1987,64 +1903,48 @@ dependencies = [ "cffi; implementation_name == \"pypy\"", ] files = [ - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:44dd6fc3034f1eaa72ece33588867df9e006a7303725a12d64c3dff92330f625"}, - {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acb704195a71ac5ea5ecf2811c9ee19ecdc62b91878528302dd0be1b9451cc90"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbb9c997932473a27afa93954bb77a9f9b786b4ccf718d903f35da3232317de"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bcb34f869d431799c3ee7d516554797f7760cb2198ecaa89c3f176f72d062be"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ece17ec5f20d7d9b442e5174ae9f020365d01ba7c112205a4d59cf19dc38ee"}, - {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ba6e5e6588e49139a0979d03a7deb9c734bde647b9a8808f26acf9c547cab1bf"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3bf8b000a4e2967e6dfdd8656cd0757d18c7e5ce3d16339e550bd462f4857e59"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:2136f64fbb86451dbbf70223635a468272dd20075f988a102bf8a3f194a411dc"}, - {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8918973fbd34e7814f59143c5f600ecd38b8038161239fd1a3d33d5817a38b8"}, - {file = "pyzmq-26.0.3-cp310-cp310-win32.whl", hash = "sha256:0aaf982e68a7ac284377d051c742610220fd06d330dcd4c4dbb4cdd77c22a537"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:f1a9b7d00fdf60b4039f4455afd031fe85ee8305b019334b72dcf73c567edc47"}, - {file = "pyzmq-26.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:80b12f25d805a919d53efc0a5ad7c0c0326f13b4eae981a5d7b7cc343318ebb7"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:a72a84570f84c374b4c287183debc776dc319d3e8ce6b6a0041ce2e400de3f32"}, - {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ca684ee649b55fd8f378127ac8462fb6c85f251c2fb027eb3c887e8ee347bcd"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e222562dc0f38571c8b1ffdae9d7adb866363134299264a1958d077800b193b7"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17cde1db0754c35a91ac00b22b25c11da6eec5746431d6e5092f0cd31a3fea9"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7c0c0b3244bb2275abe255d4a30c050d541c6cb18b870975553f1fb6f37527"}, - {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac97a21de3712afe6a6c071abfad40a6224fd14fa6ff0ff8d0c6e6cd4e2f807a"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88b88282e55fa39dd556d7fc04160bcf39dea015f78e0cecec8ff4f06c1fc2b5"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:72b67f966b57dbd18dcc7efbc1c7fc9f5f983e572db1877081f075004614fcdd"}, - {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4b6cecbbf3b7380f3b61de3a7b93cb721125dc125c854c14ddc91225ba52f83"}, - {file = "pyzmq-26.0.3-cp311-cp311-win32.whl", hash = "sha256:eed56b6a39216d31ff8cd2f1d048b5bf1700e4b32a01b14379c3b6dde9ce3aa3"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:3191d312c73e3cfd0f0afdf51df8405aafeb0bad71e7ed8f68b24b63c4f36500"}, - {file = "pyzmq-26.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:b6907da3017ef55139cf0e417c5123a84c7332520e73a6902ff1f79046cd3b94"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:068ca17214038ae986d68f4a7021f97e187ed278ab6dccb79f837d765a54d753"}, - {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7821d44fe07335bea256b9f1f41474a642ca55fa671dfd9f00af8d68a920c2d4"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb438a26d87c123bb318e5f2b3d86a36060b01f22fbdffd8cf247d52f7c9a2b"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69ea9d6d9baa25a4dc9cef5e2b77b8537827b122214f210dd925132e34ae9b12"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7daa3e1369355766dea11f1d8ef829905c3b9da886ea3152788dc25ee6079e02"}, - {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6ca7a9a06b52d0e38ccf6bca1aeff7be178917893f3883f37b75589d42c4ac20"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1b7d0e124948daa4d9686d421ef5087c0516bc6179fdcf8828b8444f8e461a77"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_i686.whl", 
hash = "sha256:e746524418b70f38550f2190eeee834db8850088c834d4c8406fbb9bc1ae10b2"}, - {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b3146f9ae6af82c47a5282ac8803523d381b3b21caeae0327ed2f7ecb718798"}, - {file = "pyzmq-26.0.3-cp312-cp312-win32.whl", hash = "sha256:2b291d1230845871c00c8462c50565a9cd6026fe1228e77ca934470bb7d70ea0"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:926838a535c2c1ea21c903f909a9a54e675c2126728c21381a94ddf37c3cbddf"}, - {file = "pyzmq-26.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:5bf6c237f8c681dfb91b17f8435b2735951f0d1fad10cc5dfd96db110243370b"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c18645ef6294d99b256806e34653e86236eb266278c8ec8112622b61db255de"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e6bc96ebe49604df3ec2c6389cc3876cabe475e6bfc84ced1bf4e630662cb35"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:971e8990c5cc4ddcff26e149398fc7b0f6a042306e82500f5e8db3b10ce69f84"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8416c23161abd94cc7da80c734ad7c9f5dbebdadfdaa77dad78244457448223"}, - {file = "pyzmq-26.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:082a2988364b60bb5de809373098361cf1dbb239623e39e46cb18bc035ed9c0c"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d57dfbf9737763b3a60d26e6800e02e04284926329aee8fb01049635e957fe81"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:77a85dca4c2430ac04dc2a2185c2deb3858a34fe7f403d0a946fa56970cf60a1"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c82a6d952a1d555bf4be42b6532927d2a5686dd3c3e280e5f63225ab47ac1f5"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4496b1282c70c442809fc1b151977c3d967bfb33e4e17cedbf226d97de18f709"}, - {file = "pyzmq-26.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e4946d6bdb7ba972dfda282f9127e5756d4f299028b1566d1245fa0d438847e6"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03c0ae165e700364b266876d712acb1ac02693acd920afa67da2ebb91a0b3c09"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3e3070e680f79887d60feeda051a58d0ac36622e1759f305a41059eff62c6da7"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6ca08b840fe95d1c2bd9ab92dac5685f949fc6f9ae820ec16193e5ddf603c3b2"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e76654e9dbfb835b3518f9938e565c7806976c07b37c33526b574cc1a1050480"}, - {file = "pyzmq-26.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:871587bdadd1075b112e697173e946a07d722459d20716ceb3d1bd6c64bd08ce"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d0a2d1bd63a4ad79483049b26514e70fa618ce6115220da9efdff63688808b17"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0270b49b6847f0d106d64b5086e9ad5dc8a902413b5dbbb15d12b60f9c1747a4"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:703c60b9910488d3d0954ca585c34f541e506a091a41930e663a098d3b794c67"}, - 
{file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74423631b6be371edfbf7eabb02ab995c2563fee60a80a30829176842e71722a"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4adfbb5451196842a88fda3612e2c0414134874bffb1c2ce83ab4242ec9e027d"}, - {file = "pyzmq-26.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3516119f4f9b8671083a70b6afaa0a070f5683e431ab3dc26e9215620d7ca1ad"}, - {file = "pyzmq-26.0.3.tar.gz", hash = "sha256:dba7d9f2e047dfa2bca3b01f4f84aa5246725203d6284e3790f2ca15fba6b40a"}, + {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:263cf1e36862310bf5becfbc488e18d5d698941858860c5a8c079d1511b3b18e"}, + {file = "pyzmq-26.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d5c8b17f6e8f29138678834cf8518049e740385eb2dbf736e8f07fc6587ec682"}, + {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75a95c2358fcfdef3374cb8baf57f1064d73246d55e41683aaffb6cfe6862917"}, + {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f99de52b8fbdb2a8f5301ae5fc0f9e6b3ba30d1d5fc0421956967edcc6914242"}, + {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bcbfbab4e1895d58ab7da1b5ce9a327764f0366911ba5b95406c9104bceacb0"}, + {file = "pyzmq-26.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77ce6a332c7e362cb59b63f5edf730e83590d0ab4e59c2aa5bd79419a42e3449"}, + {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba0a31d00e8616149a5ab440d058ec2da621e05d744914774c4dde6837e1f545"}, + {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8b88641384e84a258b740801cd4dbc45c75f148ee674bec3149999adda4a8598"}, + {file = "pyzmq-26.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2fa76ebcebe555cce90f16246edc3ad83ab65bb7b3d4ce408cf6bc67740c4f88"}, + {file = "pyzmq-26.1.0-cp310-cp310-win32.whl", hash = "sha256:fbf558551cf415586e91160d69ca6416f3fce0b86175b64e4293644a7416b81b"}, + {file = "pyzmq-26.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:a7b8aab50e5a288c9724d260feae25eda69582be84e97c012c80e1a5e7e03fb2"}, + {file = "pyzmq-26.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:08f74904cb066e1178c1ec706dfdb5c6c680cd7a8ed9efebeac923d84c1f13b1"}, + {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:46d6800b45015f96b9d92ece229d92f2aef137d82906577d55fadeb9cf5fcb71"}, + {file = "pyzmq-26.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bc2431167adc50ba42ea3e5e5f5cd70d93e18ab7b2f95e724dd8e1bd2c38120"}, + {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3bb34bebaa1b78e562931a1687ff663d298013f78f972a534f36c523311a84d"}, + {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3f6329340cef1c7ba9611bd038f2d523cea79f09f9c8f6b0553caba59ec562"}, + {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:471880c4c14e5a056a96cd224f5e71211997d40b4bf5e9fdded55dafab1f98f2"}, + {file = "pyzmq-26.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ce6f2b66799971cbae5d6547acefa7231458289e0ad481d0be0740535da38d8b"}, + {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a1f6ea5b1d6cdbb8cfa0536f0d470f12b4b41ad83625012e575f0e3ecfe97f0"}, + {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:b45e6445ac95ecb7d728604bae6538f40ccf4449b132b5428c09918523abc96d"}, + {file = "pyzmq-26.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:94c4262626424683feea0f3c34951d39d49d354722db2745c42aa6bb50ecd93b"}, + {file = "pyzmq-26.1.0-cp311-cp311-win32.whl", hash = "sha256:a0f0ab9df66eb34d58205913f4540e2ad17a175b05d81b0b7197bc57d000e829"}, + {file = "pyzmq-26.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8efb782f5a6c450589dbab4cb0f66f3a9026286333fe8f3a084399149af52f29"}, + {file = "pyzmq-26.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f133d05aaf623519f45e16ab77526e1e70d4e1308e084c2fb4cedb1a0c764bbb"}, + {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:3d3146b1c3dcc8a1539e7cc094700b2be1e605a76f7c8f0979b6d3bde5ad4072"}, + {file = "pyzmq-26.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d9270fbf038bf34ffca4855bcda6e082e2c7f906b9eb8d9a8ce82691166060f7"}, + {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:995301f6740a421afc863a713fe62c0aaf564708d4aa057dfdf0f0f56525294b"}, + {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7eca8b89e56fb8c6c26dd3e09bd41b24789022acf1cf13358e96f1cafd8cae3"}, + {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d4feb2e83dfe9ace6374a847e98ee9d1246ebadcc0cb765482e272c34e5820"}, + {file = "pyzmq-26.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d4fafc2eb5d83f4647331267808c7e0c5722c25a729a614dc2b90479cafa78bd"}, + {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58c33dc0e185dd97a9ac0288b3188d1be12b756eda67490e6ed6a75cf9491d79"}, + {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:68a0a1d83d33d8367ddddb3e6bb4afbb0f92bd1dac2c72cd5e5ddc86bdafd3eb"}, + {file = "pyzmq-26.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ae7c57e22ad881af78075e0cea10a4c778e67234adc65c404391b417a4dda83"}, + {file = "pyzmq-26.1.0-cp312-cp312-win32.whl", hash = "sha256:347e84fc88cc4cb646597f6d3a7ea0998f887ee8dc31c08587e9c3fd7b5ccef3"}, + {file = "pyzmq-26.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:9f136a6e964830230912f75b5a116a21fe8e34128dcfd82285aa0ef07cb2c7bd"}, + {file = "pyzmq-26.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4b7a989c8f5a72ab1b2bbfa58105578753ae77b71ba33e7383a31ff75a504c4"}, + {file = "pyzmq-26.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b24079a14c9596846bf7516fe75d1e2188d4a528364494859106a33d8b48be38"}, + {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59d0acd2976e1064f1b398a00e2c3e77ed0a157529779e23087d4c2fb8aaa416"}, + {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:911c43a4117915203c4cc8755e0f888e16c4676a82f61caee2f21b0c00e5b894"}, + {file = "pyzmq-26.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10163e586cc609f5f85c9b233195554d77b1e9a0801388907441aaeb22841c5"}, + {file = "pyzmq-26.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:28a8b2abb76042f5fd7bd720f7fea48c0fd3e82e9de0a1bf2c0de3812ce44a42"}, + {file = "pyzmq-26.1.0.tar.gz", hash = "sha256:6c5aeea71f018ebd3b9115c7cb13863dd850e98ca6b9258509de1246461a7e7f"}, ] [[package]] @@ -2155,6 +2055,7 @@ groups = ["default"] dependencies = [ "markdown-it-py>=2.2.0", "pygments<3.0.0,>=2.13.0", + "typing-extensions<5.0,>=4.0.0; python_version < \"3.9\"", ] files = [ {file = 
"rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, @@ -2163,84 +2064,63 @@ files = [ [[package]] name = "rpds-py" -version = "0.19.0" +version = "0.20.0" requires_python = ">=3.8" summary = "Python bindings to Rust's persistent data structures (rpds)" groups = ["default"] files = [ - {file = "rpds_py-0.19.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:fb37bd599f031f1a6fb9e58ec62864ccf3ad549cf14bac527dbfa97123edcca4"}, - {file = "rpds_py-0.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3384d278df99ec2c6acf701d067147320b864ef6727405d6470838476e44d9e8"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54548e0be3ac117595408fd4ca0ac9278fde89829b0b518be92863b17ff67a2"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8eb488ef928cdbc05a27245e52de73c0d7c72a34240ef4d9893fdf65a8c1a955"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5da93debdfe27b2bfc69eefb592e1831d957b9535e0943a0ee8b97996de21b5"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79e205c70afddd41f6ee79a8656aec738492a550247a7af697d5bd1aee14f766"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:959179efb3e4a27610e8d54d667c02a9feaa86bbabaf63efa7faa4dfa780d4f1"}, - {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a6e605bb9edcf010f54f8b6a590dd23a4b40a8cb141255eec2a03db249bc915b"}, - {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9133d75dc119a61d1a0ded38fb9ba40a00ef41697cc07adb6ae098c875195a3f"}, - {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd36b712d35e757e28bf2f40a71e8f8a2d43c8b026d881aa0c617b450d6865c9"}, - {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354f3a91718489912f2e0fc331c24eaaf6a4565c080e00fbedb6015857c00582"}, - {file = "rpds_py-0.19.0-cp310-none-win32.whl", hash = "sha256:ebcbf356bf5c51afc3290e491d3722b26aaf5b6af3c1c7f6a1b757828a46e336"}, - {file = "rpds_py-0.19.0-cp310-none-win_amd64.whl", hash = "sha256:75a6076289b2df6c8ecb9d13ff79ae0cad1d5fb40af377a5021016d58cd691ec"}, - {file = "rpds_py-0.19.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6d45080095e585f8c5097897313def60caa2046da202cdb17a01f147fb263b81"}, - {file = "rpds_py-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5c9581019c96f865483d031691a5ff1cc455feb4d84fc6920a5ffc48a794d8a"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1540d807364c84516417115c38f0119dfec5ea5c0dd9a25332dea60b1d26fc4d"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e65489222b410f79711dc3d2d5003d2757e30874096b2008d50329ea4d0f88c"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9da6f400eeb8c36f72ef6646ea530d6d175a4f77ff2ed8dfd6352842274c1d8b"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37f46bb11858717e0efa7893c0f7055c43b44c103e40e69442db5061cb26ed34"}, - {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:071d4adc734de562bd11d43bd134330fb6249769b2f66b9310dab7460f4bf714"}, - {file = 
"rpds_py-0.19.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9625367c8955e4319049113ea4f8fee0c6c1145192d57946c6ffcd8fe8bf48dd"}, - {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e19509145275d46bc4d1e16af0b57a12d227c8253655a46bbd5ec317e941279d"}, - {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d438e4c020d8c39961deaf58f6913b1bf8832d9b6f62ec35bd93e97807e9cbc"}, - {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:90bf55d9d139e5d127193170f38c584ed3c79e16638890d2e36f23aa1630b952"}, - {file = "rpds_py-0.19.0-cp311-none-win32.whl", hash = "sha256:8d6ad132b1bc13d05ffe5b85e7a01a3998bf3a6302ba594b28d61b8c2cf13aaf"}, - {file = "rpds_py-0.19.0-cp311-none-win_amd64.whl", hash = "sha256:7ec72df7354e6b7f6eb2a17fa6901350018c3a9ad78e48d7b2b54d0412539a67"}, - {file = "rpds_py-0.19.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5095a7c838a8647c32aa37c3a460d2c48debff7fc26e1136aee60100a8cd8f68"}, - {file = "rpds_py-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f2f78ef14077e08856e788fa482107aa602636c16c25bdf59c22ea525a785e9"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7cc6cb44f8636fbf4a934ca72f3e786ba3c9f9ba4f4d74611e7da80684e48d2"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf902878b4af334a09de7a45badbff0389e7cf8dc2e4dcf5f07125d0b7c2656d"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:688aa6b8aa724db1596514751ffb767766e02e5c4a87486ab36b8e1ebc1aedac"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57dbc9167d48e355e2569346b5aa4077f29bf86389c924df25c0a8b9124461fb"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4cf5a9497874822341c2ebe0d5850fed392034caadc0bad134ab6822c0925b"}, - {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a790d235b9d39c70a466200d506bb33a98e2ee374a9b4eec7a8ac64c2c261fa"}, - {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1d16089dfa58719c98a1c06f2daceba6d8e3fb9b5d7931af4a990a3c486241cb"}, - {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bc9128e74fe94650367fe23f37074f121b9f796cabbd2f928f13e9661837296d"}, - {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c8f77e661ffd96ff104bebf7d0f3255b02aa5d5b28326f5408d6284c4a8b3248"}, - {file = "rpds_py-0.19.0-cp312-none-win32.whl", hash = "sha256:5f83689a38e76969327e9b682be5521d87a0c9e5a2e187d2bc6be4765f0d4600"}, - {file = "rpds_py-0.19.0-cp312-none-win_amd64.whl", hash = "sha256:06925c50f86da0596b9c3c64c3837b2481337b83ef3519e5db2701df695453a4"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:75969cf900d7be665ccb1622a9aba225cf386bbc9c3bcfeeab9f62b5048f4a07"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8445f23f13339da640d1be8e44e5baf4af97e396882ebbf1692aecd67f67c479"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5a7c1062ef8aea3eda149f08120f10795835fc1c8bc6ad948fb9652a113ca55"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:462b0c18fbb48fdbf980914a02ee38c423a25fcc4cf40f66bacc95a2d2d73bc8"}, - {file = 
"rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3208f9aea18991ac7f2b39721e947bbd752a1abbe79ad90d9b6a84a74d44409b"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3444fe52b82f122d8a99bf66777aed6b858d392b12f4c317da19f8234db4533"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb4bac7185a9f0168d38c01d7a00addece9822a52870eee26b8d5b61409213"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6b130bd4163c93798a6b9bb96be64a7c43e1cec81126ffa7ffaa106e1fc5cef5"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a707b158b4410aefb6b054715545bbb21aaa5d5d0080217290131c49c2124a6e"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dc9ac4659456bde7c567107556ab065801622396b435a3ff213daef27b495388"}, - {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:81ea573aa46d3b6b3d890cd3c0ad82105985e6058a4baed03cf92518081eec8c"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f148c3f47f7f29a79c38cc5d020edcb5ca780020fab94dbc21f9af95c463581"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0906357f90784a66e89ae3eadc2654f36c580a7d65cf63e6a616e4aec3a81be"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f629ecc2db6a4736b5ba95a8347b0089240d69ad14ac364f557d52ad68cf94b0"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6feacd1d178c30e5bc37184526e56740342fd2aa6371a28367bad7908d454fc"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae8b6068ee374fdfab63689be0963333aa83b0815ead5d8648389a8ded593378"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d57546bad81e0da13263e4c9ce30e96dcbe720dbff5ada08d2600a3502e526"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b6683a37338818646af718c9ca2a07f89787551057fae57c4ec0446dc6224b"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e8481b946792415adc07410420d6fc65a352b45d347b78fec45d8f8f0d7496f0"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bec35eb20792ea64c3c57891bc3ca0bedb2884fbac2c8249d9b731447ecde4fa"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:aa5476c3e3a402c37779e95f7b4048db2cb5b0ed0b9d006983965e93f40fe05a"}, - {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:19d02c45f2507b489fd4df7b827940f1420480b3e2e471e952af4d44a1ea8e34"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3e2fd14c5d49ee1da322672375963f19f32b3d5953f0615b175ff7b9d38daed"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:93a91c2640645303e874eada51f4f33351b84b351a689d470f8108d0e0694210"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5b9fc03bf76a94065299d4a2ecd8dfbae4ae8e2e8098bbfa6ab6413ca267709"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:5a4b07cdf3f84310c08c1de2c12ddadbb7a77568bcb16e95489f9c81074322ed"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba0ed0dc6763d8bd6e5de5cf0d746d28e706a10b615ea382ac0ab17bb7388633"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:474bc83233abdcf2124ed3f66230a1c8435896046caa4b0b5ab6013c640803cc"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329c719d31362355a96b435f4653e3b4b061fcc9eba9f91dd40804ca637d914e"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef9101f3f7b59043a34f1dccbb385ca760467590951952d6701df0da9893ca0c"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0121803b0f424ee2109d6e1f27db45b166ebaa4b32ff47d6aa225642636cd834"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8344127403dea42f5970adccf6c5957a71a47f522171fafaf4c6ddb41b61703a"}, - {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:443cec402ddd650bb2b885113e1dcedb22b1175c6be223b14246a714b61cd521"}, - {file = "rpds_py-0.19.0.tar.gz", hash = "sha256:4fdc9afadbeb393b4bbbad75481e0ea78e4469f2e1d713a90811700830b553a9"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, + {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, + {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, + {file = 
"rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, + {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, + {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, + {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, + {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, + {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, ] [[package]] @@ -2294,13 +2174,13 @@ files = [ [[package]] name = "setuptools" -version = "70.2.0" +version = "72.1.0" requires_python = ">=3.8" summary = "Easily download, build, install, upgrade, and uninstall Python packages" groups = ["default"] files = [ - {file = "setuptools-70.2.0-py3-none-any.whl", hash = "sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05"}, - {file = "setuptools-70.2.0.tar.gz", hash = "sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1"}, + {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, + {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, ] [[package]] @@ -2401,21 +2281,22 @@ files = [ [[package]] name = "sphinx" -version = "7.3.7" +version = "7.4.7" requires_python = ">=3.9" summary = "Python documentation generator" groups = ["default"] dependencies = [ - "Jinja2>=3.0", - "Pygments>=2.14", + "Jinja2>=3.1", + "Pygments>=2.17", "alabaster~=0.7.14", - "babel>=2.9", - "colorama>=0.4.5; 
sys_platform == \"win32\"", - "docutils<0.22,>=0.18.1", + "babel>=2.13", + "colorama>=0.4.6; sys_platform == \"win32\"", + "docutils<0.22,>=0.20", "imagesize>=1.3", - "packaging>=21.0", - "requests>=2.25.0", - "snowballstemmer>=2.0", + "importlib-metadata>=6.0; python_version < \"3.10\"", + "packaging>=23.0", + "requests>=2.30.0", + "snowballstemmer>=2.2", "sphinxcontrib-applehelp", "sphinxcontrib-devhelp", "sphinxcontrib-htmlhelp>=2.0.0", @@ -2425,8 +2306,8 @@ dependencies = [ "tomli>=2; python_version < \"3.11\"", ] files = [ - {file = "sphinx-7.3.7-py3-none-any.whl", hash = "sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3"}, - {file = "sphinx-7.3.7.tar.gz", hash = "sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc"}, + {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, + {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, ] [[package]] @@ -2464,16 +2345,16 @@ files = [ [[package]] name = "sphinx-design" -version = "0.6.0" +version = "0.6.1" requires_python = ">=3.9" summary = "A sphinx extension for designing beautiful, view size responsive web components." groups = ["default"] dependencies = [ - "sphinx<8,>=5", + "sphinx<9,>=6", ] files = [ - {file = "sphinx_design-0.6.0-py3-none-any.whl", hash = "sha256:e9bd07eecec82eb07ff72cb50fc3624e186b04f5661270bc7b62db86c7546e95"}, - {file = "sphinx_design-0.6.0.tar.gz", hash = "sha256:ec8e3c5c59fed4049b3a5a2e209360feab31829346b5f6a0c7c342b894082192"}, + {file = "sphinx_design-0.6.1-py3-none-any.whl", hash = "sha256:b11f37db1a802a183d61b159d9a202314d4d2fe29c163437001324fe2f19549c"}, + {file = "sphinx_design-0.6.1.tar.gz", hash = "sha256:b44eea3719386d04d765c1a8257caca2b3e6f8421d7b3a5e742c0fd45f84e632"}, ] [[package]] @@ -2510,35 +2391,35 @@ files = [ [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.8" +version = "2.0.0" requires_python = ">=3.9" summary = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" groups = ["default"] files = [ - {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, - {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, ] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.6" +version = "2.0.0" requires_python = ">=3.9" summary = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" groups = ["default"] files = [ - {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, - {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, ] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.5" +version = "2.1.0" 
requires_python = ">=3.9" summary = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" groups = ["default"] files = [ - {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, - {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, ] [[package]] @@ -2554,63 +2435,64 @@ files = [ [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.7" +version = "2.0.0" requires_python = ">=3.9" summary = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" groups = ["default"] files = [ - {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, - {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, ] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.10" +version = "2.0.0" requires_python = ">=3.9" summary = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" groups = ["default"] files = [ - {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, - {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, ] [[package]] name = "sqlalchemy" -version = "2.0.31" +version = "2.0.32" requires_python = ">=3.7" summary = "Database Abstraction Library" groups = ["default"] dependencies = [ "greenlet!=0.4.17; (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"", + "importlib-metadata; python_version < \"3.8\"", "typing-extensions>=4.6.0", ] files = [ - {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"}, - 
{file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = "sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"}, - {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"}, - {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"}, - {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"}, - {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"}, - {file = "SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c9045ecc2e4db59bfc97b20516dfdf8e41d910ac6fb667ebd3a79ea54084619"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:1467940318e4a860afd546ef61fefb98a14d935cd6817ed07a228c7f7c62f389"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5954463675cb15db8d4b521f3566a017c8789222b8316b1e6934c811018ee08b"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167e7497035c303ae50651b351c28dc22a40bb98fbdb8468cdc971821b1ae533"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b27dfb676ac02529fb6e343b3a482303f16e6bc3a4d868b73935b8792edb52d0"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bf2360a5e0f7bd75fa80431bf8ebcfb920c9f885e7956c7efde89031695cafb8"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-win32.whl", hash = "sha256:306fe44e754a91cd9d600a6b070c1f2fadbb4a1a257b8781ccf33c7067fd3e4d"}, + {file = "SQLAlchemy-2.0.32-cp310-cp310-win_amd64.whl", hash = "sha256:99db65e6f3ab42e06c318f15c98f59a436f1c78179e6a6f40f529c8cc7100b22"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21b053be28a8a414f2ddd401f1be8361e41032d2ef5884b2f31d31cb723e559f"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b178e875a7a25b5938b53b006598ee7645172fccafe1c291a706e93f48499ff5"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723a40ee2cc7ea653645bd4cf024326dea2076673fc9d3d33f20f6c81db83e1d"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295ff8689544f7ee7e819529633d058bd458c1fd7f7e3eebd0f9268ebc56c2a0"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49496b68cd190a147118af585173ee624114dfb2e0297558c460ad7495f9dfe2"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:acd9b73c5c15f0ec5ce18128b1fe9157ddd0044abc373e6ecd5ba376a7e5d961"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-win32.whl", hash = "sha256:9365a3da32dabd3e69e06b972b1ffb0c89668994c7e8e75ce21d3e5e69ddef28"}, + {file = "SQLAlchemy-2.0.32-cp311-cp311-win_amd64.whl", hash = "sha256:8bd63d051f4f313b102a2af1cbc8b80f061bf78f3d5bd0843ff70b5859e27924"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bab3db192a0c35e3c9d1560eb8332463e29e5507dbd822e29a0a3c48c0a8d92"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:19d98f4f58b13900d8dec4ed09dd09ef292208ee44cc9c2fe01c1f0a2fe440e9"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd33c61513cb1b7371fd40cf221256456d26a56284e7d19d1f0b9f1eb7dd7e8"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6ba0497c1d066dd004e0f02a92426ca2df20fac08728d03f67f6960271feec"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2b6be53e4fde0065524f1a0a7929b10e9280987b320716c1509478b712a7688c"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:916a798f62f410c0b80b63683c8061f5ebe237b0f4ad778739304253353bc1cb"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-win32.whl", hash = "sha256:31983018b74908ebc6c996a16ad3690301a23befb643093fcfe85efd292e384d"}, + {file = "SQLAlchemy-2.0.32-cp312-cp312-win_amd64.whl", hash = "sha256:4363ed245a6231f2e2957cccdda3c776265a75851f4753c60f3004b90e69bfeb"}, + {file = "SQLAlchemy-2.0.32-py3-none-any.whl", hash = 
"sha256:e567a8793a692451f706b363ccf3c45e056b67d90ead58c3bc9471af5d212202"}, + {file = "SQLAlchemy-2.0.32.tar.gz", hash = "sha256:c1b88cc8b02b6a5f0efb0345a03672d4c897dc7d92585176f88c67346f565ea8"}, ] [[package]] @@ -2630,16 +2512,17 @@ files = [ [[package]] name = "starlette" -version = "0.37.2" +version = "0.38.2" requires_python = ">=3.8" summary = "The little ASGI library that shines." groups = ["default"] dependencies = [ "anyio<5,>=3.4.0", + "typing-extensions>=3.10.0; python_version < \"3.10\"", ] files = [ - {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"}, - {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"}, + {file = "starlette-0.38.2-py3-none-any.whl", hash = "sha256:4ec6a59df6bbafdab5f567754481657f7ed90dc9d69b0c9ff017907dd54faeff"}, + {file = "starlette-0.38.2.tar.gz", hash = "sha256:c7c0441065252160993a1a37cf2a73bb64d271b17303e0b0c1eb7191cfb12d75"}, ] [[package]] @@ -2665,17 +2548,6 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[[package]] -name = "toolz" -version = "0.12.1" -requires_python = ">=3.7" -summary = "List processing tools and functional utilities" -groups = ["default"] -files = [ - {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"}, - {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"}, -] - [[package]] name = "tornado" version = "6.4.1" @@ -2698,7 +2570,7 @@ files = [ [[package]] name = "tqdm" -version = "4.66.4" +version = "4.66.5" requires_python = ">=3.7" summary = "Fast, Extensible Progress Meter" groups = ["default"] @@ -2706,8 +2578,8 @@ dependencies = [ "colorama; platform_system == \"Windows\"", ] files = [ - {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, - {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, + {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, ] [[package]] @@ -2743,6 +2615,17 @@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "tzdata" +version = "2024.1" +requires_python = ">=2" +summary = "Provider of IANA time zone data" +groups = ["default"] +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + [[package]] name = "uri-template" version = "1.3.0" @@ -2767,7 +2650,7 @@ files = [ [[package]] name = "uvicorn" -version = "0.30.1" +version = "0.30.5" requires_python = ">=3.8" summary = "The lightning-fast ASGI server." 
groups = ["default"] @@ -2777,8 +2660,8 @@ dependencies = [ "typing-extensions>=4.0; python_version < \"3.11\"", ] files = [ - {file = "uvicorn-0.30.1-py3-none-any.whl", hash = "sha256:cd17daa7f3b9d7a24de3617820e634d0933b69eed8e33a516071174427238c81"}, - {file = "uvicorn-0.30.1.tar.gz", hash = "sha256:d46cd8e0fd80240baffbcd9ec1012a712938754afcf81bce56c024c1656aece8"}, + {file = "uvicorn-0.30.5-py3-none-any.whl", hash = "sha256:b2d86de274726e9878188fa07576c9ceeff90a839e2b6e25c917fe05f5a6c835"}, + {file = "uvicorn-0.30.5.tar.gz", hash = "sha256:ac6fdbd4425c5fd17a9fe39daf4d4d075da6fdc80f653e5894cdc2fd98752bee"}, ] [[package]] @@ -2799,10 +2682,6 @@ files = [ {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, - {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, - {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, @@ -2868,14 +2747,6 @@ files = [ {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7e1f9c5d1160d03b93fc4b68a0aeb82fe25563e12fbcdc8507f8434ab6f823c"}, {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030bc4e68d14bcad2294ff68c1ed87215fbd9a10d9dea74e7cfe8a17869785ab"}, {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace7d060432acde5532e26863e897ee684780337afb775107c0a90ae8dbccfd2"}, - {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5834e1f8b71476a26df97d121c0c0ed3549d869124ed2433e02491553cb468c2"}, - {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0bc3b2f93a140df6806c8467c7f51ed5e55a931b031b5c2d7ff6132292e803d6"}, - {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fdebb655bb1ba0122402352b0a4254812717a017d2dc49372a1d47e24073795"}, - {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8e0aa0e8cc2a43561e0184c0513e291ca891db13a269d8d47cb9841ced7c71"}, - {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2f350cbaa4bb812314af5dab0eb8d538481e2e2279472890864547f3fe2281ed"}, - {file = 
"watchfiles-0.22.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7a74436c415843af2a769b36bf043b6ccbc0f8d784814ba3d42fc961cdb0a9dc"}, - {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00ad0bcd399503a84cc688590cdffbe7a991691314dde5b57b3ed50a41319a31"}, - {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72a44e9481afc7a5ee3291b09c419abab93b7e9c306c9ef9108cb76728ca58d2"}, {file = "watchfiles-0.22.0.tar.gz", hash = "sha256:988e981aaab4f3955209e7e28c7794acdb690be1efa7f16f8ea5aba7ffdadacb"}, ] @@ -2884,6 +2755,9 @@ name = "wcwidth" version = "0.2.13" summary = "Measures the displayed width of unicode strings in a terminal" groups = ["default"] +dependencies = [ + "backports-functools-lru-cache>=1.2.1; python_version < \"3.2\"", +] files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, @@ -2945,29 +2819,19 @@ files = [ {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, - {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, - {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, - {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, - {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, {file = "websockets-12.0-py3-none-any.whl", 
hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, ] [[package]] name = "wheel" -version = "0.43.0" +version = "0.44.0" requires_python = ">=3.8" summary = "A built-package format for Python" groups = ["default"] files = [ - {file = "wheel-0.43.0-py3-none-any.whl", hash = "sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81"}, - {file = "wheel-0.43.0.tar.gz", hash = "sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85"}, + {file = "wheel-0.44.0-py3-none-any.whl", hash = "sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f"}, + {file = "wheel-0.44.0.tar.gz", hash = "sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49"}, ] [[package]] diff --git a/docs/pyproject.toml b/docs/pyproject.toml index 2a4b0a5..73f84a0 100644 --- a/docs/pyproject.toml +++ b/docs/pyproject.toml @@ -14,7 +14,6 @@ dependencies = [ "furo>=2023.8.19", "myst-parser>=2.0.0", "autodoc-pydantic>=2.0.1", - "nptyping>=2.5.0", "sphinx-autobuild>=2021.3.14", "sphinx-design>=0.5.0", "sphinx-togglebutton>=0.3.2", diff --git a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py index ca354a5..f8ea857 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py +++ b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py @@ -266,10 +266,7 @@ class NamespacesAdapter(Adapter): else: ns = ns[0] - schema_names = [] - for sch in ns.schema_: - if sch.source is not None: - schema_names.append(sch.source) + schema_names = [sch.source for sch in ns.schema_ if sch.source is not None] return schema_names def schema_namespace(self, name: str) -> Optional[str]: diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index faf9107..d080d03 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -133,7 +133,7 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: # special case where we are returning a row in a ragged array, # same as above - prevent pandas pivoting to long val = Series([val]) @@ -382,6 +382,20 @@ class VectorIndexMixin(BaseModel): return len(self.value) +class DynamicTableRegionMixin(BaseModel): + """ + Mixin to allow indexing references to regions of dynamictables + """ + + table: "DynamicTableMixin" + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + return self.table[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + self.table[key] = value + + DYNAMIC_TABLE_IMPORTS = Imports( imports=[ Import( @@ -417,7 +431,9 @@ Imports required for the dynamic table mixin VectorData is purposefully excluded as an import or an inject so that it will be resolved to the VectorData definition in the generated module """ -DYNAMIC_TABLE_INJECTS = [VectorDataMixin, VectorIndexMixin, DynamicTableMixin] - -# class VectorDataMixin(BaseModel): -# index: Optional[BaseModel] = None +DYNAMIC_TABLE_INJECTS = [ + VectorDataMixin, + VectorIndexMixin, + DynamicTableRegionMixin, + DynamicTableMixin, +] diff --git 
a/nwb_linkml/src/nwb_linkml/io/hdf5.py b/nwb_linkml/src/nwb_linkml/io/hdf5.py index d902bd9..387e4a6 100644 --- a/nwb_linkml/src/nwb_linkml/io/hdf5.py +++ b/nwb_linkml/src/nwb_linkml/io/hdf5.py @@ -242,10 +242,7 @@ def find_references(h5f: h5py.File, path: str) -> List[str]: def _find_references(name: str, obj: h5py.Group | h5py.Dataset) -> None: pbar.update() - refs = [] - for attr in obj.attrs.values(): - if isinstance(attr, h5py.h5r.Reference): - refs.append(attr) + refs = [attr for attr in obj.attrs.values() if isinstance(attr, h5py.h5r.Reference)] if isinstance(obj, h5py.Dataset): # dataset is all references diff --git a/nwb_linkml/src/nwb_linkml/maps/dtype.py b/nwb_linkml/src/nwb_linkml/maps/dtype.py index 1c90e0d..d618dbe 100644 --- a/nwb_linkml/src/nwb_linkml/maps/dtype.py +++ b/nwb_linkml/src/nwb_linkml/maps/dtype.py @@ -100,10 +100,9 @@ np_to_python = { np.float64, np.single, np.double, - np.float_, ) }, - **{n: str for n in (np.character, np.str_, np.string_, np.unicode_)}, + **{n: str for n in (np.character, np.str_)}, } allowed_precisions = { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index ab05d9d..8d3f9e9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -171,6 +171,20 @@ class VectorIndexMixin(BaseModel): return len(self.value) +class DynamicTableRegionMixin(BaseModel): + """ + Mixin to allow indexing references to regions of dynamictables + """ + + table: "DynamicTableMixin" + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + return self.table[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + self.table[key] = value + + class DynamicTableMixin(BaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -281,7 +295,7 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: # special case where we are returning a row in a ragged array, # same as above - prevent pandas pivoting to long val = Series([val]) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 7da6b05..2be1fbe 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -171,6 +171,20 @@ class VectorIndexMixin(BaseModel): return len(self.value) +class DynamicTableRegionMixin(BaseModel): + """ + Mixin to allow indexing references to regions of dynamictables + """ + + table: "DynamicTableMixin" + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + return self.table[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + self.table[key] = value + + class DynamicTableMixin(BaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -281,7 +295,7 @@ class DynamicTableMixin(BaseModel): # special case where pandas will 
unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: # special case where we are returning a row in a ragged array, # same as above - prevent pandas pivoting to long val = Series([val]) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index a1ba281..6cf1bf2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -171,6 +171,20 @@ class VectorIndexMixin(BaseModel): return len(self.value) +class DynamicTableRegionMixin(BaseModel): + """ + Mixin to allow indexing references to regions of dynamictables + """ + + table: "DynamicTableMixin" + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + return self.table[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + self.table[key] = value + + class DynamicTableMixin(BaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -281,7 +295,7 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: # special case where we are returning a row in a ragged array, # same as above - prevent pandas pivoting to long val = Series([val]) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py index bd4f1aa..8e79364 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -172,6 +172,20 @@ class VectorIndexMixin(BaseModel): return len(self.value) +class DynamicTableRegionMixin(BaseModel): + """ + Mixin to allow indexing references to regions of dynamictables + """ + + table: "DynamicTableMixin" + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + return self.table[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + self.table[key] = value + + class DynamicTableMixin(BaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -282,7 +296,7 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: # special case where we are returning a row in a ragged array, # same as above - prevent pandas pivoting to long val = Series([val]) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py index 2d676af..b78ba5c 100644 --- 
a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -172,6 +172,20 @@ class VectorIndexMixin(BaseModel): return len(self.value) +class DynamicTableRegionMixin(BaseModel): + """ + Mixin to allow indexing references to regions of dynamictables + """ + + table: "DynamicTableMixin" + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + return self.table[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + self.table[key] = value + + class DynamicTableMixin(BaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -282,7 +296,7 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: # special case where we are returning a row in a ragged array, # same as above - prevent pandas pivoting to long val = Series([val]) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py index 19f855b..b17a853 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -172,6 +172,20 @@ class VectorIndexMixin(BaseModel): return len(self.value) +class DynamicTableRegionMixin(BaseModel): + """ + Mixin to allow indexing references to regions of dynamictables + """ + + table: "DynamicTableMixin" + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + return self.table[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + self.table[key] = value + + class DynamicTableMixin(BaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -282,7 +296,7 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: # special case where we are returning a row in a ragged array, # same as above - prevent pandas pivoting to long val = Series([val]) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index 1d1cbf4..536d5af 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -172,6 +172,20 @@ class VectorIndexMixin(BaseModel): return len(self.value) +class DynamicTableRegionMixin(BaseModel): + """ + Mixin to allow indexing references to regions of dynamictables + """ + + table: "DynamicTableMixin" + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + return self.table[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + self.table[key] = value + + class DynamicTableMixin(BaseModel): """ 
Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -282,7 +296,7 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: # special case where we are returning a row in a ragged array, # same as above - prevent pandas pivoting to long val = Series([val]) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index a911159..80d1d6d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -172,6 +172,20 @@ class VectorIndexMixin(BaseModel): return len(self.value) +class DynamicTableRegionMixin(BaseModel): + """ + Mixin to allow indexing references to regions of dynamictables + """ + + table: "DynamicTableMixin" + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + return self.table[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + self.table[key] = value + + class DynamicTableMixin(BaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -282,7 +296,7 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: # special case where we are returning a row in a ragged array, # same as above - prevent pandas pivoting to long val = Series([val]) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 266c8d4..7ddc7ec 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -172,6 +172,20 @@ class VectorIndexMixin(BaseModel): return len(self.value) +class DynamicTableRegionMixin(BaseModel): + """ + Mixin to allow indexing references to regions of dynamictables + """ + + table: "DynamicTableMixin" + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + return self.table[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + self.table[key] = value + + class DynamicTableMixin(BaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -282,7 +296,7 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: # special case where we are returning a row in a ragged array, # same as above - prevent pandas pivoting to long val = Series([val]) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py 
b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index b1a9b5a..a8315f3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -172,6 +172,20 @@ class VectorIndexMixin(BaseModel): return len(self.value) +class DynamicTableRegionMixin(BaseModel): + """ + Mixin to allow indexing references to regions of dynamictables + """ + + table: "DynamicTableMixin" + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + return self.table[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + self.table[key] = value + + class DynamicTableMixin(BaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -282,7 +296,7 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: # special case where we are returning a row in a ragged array, # same as above - prevent pandas pivoting to long val = Series([val]) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index ca52a0b..91e25e7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -172,6 +172,20 @@ class VectorIndexMixin(BaseModel): return len(self.value) +class DynamicTableRegionMixin(BaseModel): + """ + Mixin to allow indexing references to regions of dynamictables + """ + + table: "DynamicTableMixin" + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + return self.table[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + self.table[key] = value + + class DynamicTableMixin(BaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -282,7 +296,7 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and len(val) > 1: + elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: # special case where we are returning a row in a ragged array, # same as above - prevent pandas pivoting to long val = Series([val]) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index e6e364b..8e9d681 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -1,7 +1,12 @@ from __future__ import annotations -from ...hdmf_common.v1_8_0.hdmf_common_base import Data +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Dict, Optional, 
Union, overload, Tuple +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple from pydantic import ( BaseModel, ConfigDict, @@ -167,6 +172,20 @@ class VectorIndexMixin(BaseModel): return len(self.value) +class DynamicTableRegionMixin(BaseModel): + """ + Mixin to allow indexing references to regions of dynamictables + """ + + table: "DynamicTableMixin" + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + return self.table[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + self.table[key] = value + + class DynamicTableMixin(BaseModel): """ Mixin to make DynamicTable subclasses behave like tables/dataframes @@ -277,7 +296,7 @@ class DynamicTableMixin(BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and val.shape[0] > 1: + elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: # special case where we are returning a row in a ragged array, # same as above - prevent pandas pivoting to long val = Series([val]) diff --git a/nwb_linkml/src/nwb_linkml/types/df.py b/nwb_linkml/src/nwb_linkml/types/df.py index 2fcea88..19d36a5 100644 --- a/nwb_linkml/src/nwb_linkml/types/df.py +++ b/nwb_linkml/src/nwb_linkml/types/df.py @@ -14,180 +14,163 @@ Pydantic models that behave like pandas dataframes left in this module since it is necessary for it to make sense. """ -import ast -from typing import Any, Dict, Optional, Type +# +# class DataFrame(BaseModel, pd.DataFrame): +# """ +# Pydantic model root class that mimics a pandas dataframe. +# +# Notes: +# +# The synchronization between the underlying lists in the pydantic model +# and the derived dataframe is partial, and at the moment unidirectional. +# This class is primarily intended for reading from tables stored in +# NWB files rather than being able to manipulate them. +# +# The dataframe IS updated when new values are *assigned* to a field. +# +# eg.:: +# +# MyModel.fieldval = [1,2,3] +# +# But the dataframe is NOT updated when existing values are updated. +# +# eg.:: +# +# MyModel.fieldval.append(4) +# +# In that case you need to call :meth:`.update_df` manually. +# +# Additionally, if the dataframe is modified, the underlying lists are NOT updated, +# but when the model is dumped to a dictionary or serialized, the dataframe IS used, +# so changes will be reflected then. +# +# Fields that shadow pandas methods WILL prevent them from being usable, except +# by directly accessing the dataframe like ``mymodel._df`` +# +# """ +# +# _df: pd.DataFrame = None +# model_config = ConfigDict(validate_assignment=True) +# +# def __init__(self, **kwargs): +# # pdb.set_trace() +# super().__init__(**kwargs) +# +# self._df = self.__make_df() +# +# def __make_df(self) -> pd.DataFrame: +# # make dict that can handle ragged arrays and NoneTypes +# items = {k: v for k, v in self.__dict__.items() if k in self.model_fields} +# +# df_dict = { +# k: (pd.Series(v) if isinstance(v, list) else pd.Series([v])) for k, v in items.items() +# } +# df = pd.DataFrame(df_dict) +# # replace Nans with None +# df = df.fillna(np.nan).replace([np.nan], [None]) +# return df +# +# def update_df(self) -> None: +# """ +# Update the internal dataframe in the case that the model values are changed +# in a way that we can't detect, like appending to one of the lists. 
+# +# """ +# self._df = self.__make_df() +# +# def __getattr__(self, item: str): +# """ +# Mimic pandas dataframe and pydantic model behavior +# """ +# if item in ("df", "_df"): +# return self.__pydantic_private__["_df"] +# elif item in self.model_fields: +# return self._df[item] +# else: +# try: +# return object.__getattribute__(self._df, item) +# except AttributeError: +# return object.__getattribute__(self, item) +# +# @model_validator(mode="after") +# def recreate_df(self) -> None: +# """ +# Remake DF when validating (eg. when updating values on assignment) +# """ +# self.update_df() +# +# @model_serializer(mode="wrap", when_used="always") +# def serialize_model(self, nxt: SerializerFunctionWrapHandler) -> Dict[str, Any]: +# """ +# We don't handle values that are changed on the dataframe by directly +# updating the underlying model lists, but we implicitly handle them +# by using the dataframe as the source when serializing +# """ +# if self._df is None: +# return nxt(self) +# else: +# out = self._df.to_dict("list") +# # remove Nones +# out = {k: [inner_v for inner_v in v if inner_v is not None] for k, v in out.items()} +# return nxt(self.__class__(**out)) -import h5py -import numpy as np -import pandas as pd -from pydantic import ( - BaseModel, - ConfigDict, - SerializerFunctionWrapHandler, - model_serializer, - model_validator, -) - -from nwb_linkml.maps.hdmf import dereference_reference_vector, model_from_dynamictable -from nwb_linkml.types.hdf5 import HDF5_Path - - -class DataFrame(BaseModel, pd.DataFrame): - """ - Pydantic model root class that mimics a pandas dataframe. - - Notes: - - The synchronization between the underlying lists in the pydantic model - and the derived dataframe is partial, and at the moment unidirectional. - This class is primarily intended for reading from tables stored in - NWB files rather than being able to manipulate them. - - The dataframe IS updated when new values are *assigned* to a field. - - eg.:: - - MyModel.fieldval = [1,2,3] - - But the dataframe is NOT updated when existing values are updated. - - eg.:: - - MyModel.fieldval.append(4) - - In that case you need to call :meth:`.update_df` manually. - - Additionally, if the dataframe is modified, the underlying lists are NOT updated, - but when the model is dumped to a dictionary or serialized, the dataframe IS used, - so changes will be reflected then. - - Fields that shadow pandas methods WILL prevent them from being usable, except - by directly accessing the dataframe like ``mymodel._df`` - - """ - - _df: pd.DataFrame = None - model_config = ConfigDict(validate_assignment=True) - - def __init__(self, **kwargs): - # pdb.set_trace() - super().__init__(**kwargs) - - self._df = self.__make_df() - - def __make_df(self) -> pd.DataFrame: - # make dict that can handle ragged arrays and NoneTypes - items = {k: v for k, v in self.__dict__.items() if k in self.model_fields} - - df_dict = { - k: (pd.Series(v) if isinstance(v, list) else pd.Series([v])) for k, v in items.items() - } - df = pd.DataFrame(df_dict) - # replace Nans with None - df = df.fillna(np.nan).replace([np.nan], [None]) - return df - - def update_df(self) -> None: - """ - Update the internal dataframe in the case that the model values are changed - in a way that we can't detect, like appending to one of the lists. 
- - """ - self._df = self.__make_df() - - def __getattr__(self, item: str): - """ - Mimic pandas dataframe and pydantic model behavior - """ - if item in ("df", "_df"): - return self.__pydantic_private__["_df"] - elif item in self.model_fields: - return self._df[item] - else: - try: - return object.__getattribute__(self._df, item) - except AttributeError: - return object.__getattribute__(self, item) - - @model_validator(mode="after") - def recreate_df(self) -> None: - """ - Remake DF when validating (eg. when updating values on assignment) - """ - self.update_df() - - @model_serializer(mode="wrap", when_used="always") - def serialize_model(self, nxt: SerializerFunctionWrapHandler) -> Dict[str, Any]: - """ - We don't handle values that are changed on the dataframe by directly - updating the underlying model lists, but we implicitly handle them - by using the dataframe as the source when serializing - """ - if self._df is None: - return nxt(self) - else: - out = self._df.to_dict("list") - # remove Nones - out = {k: [inner_v for inner_v in v if inner_v is not None] for k, v in out.items()} - return nxt(self.__class__(**out)) - - -def dynamictable_to_df( - group: h5py.Group, model: Optional[Type[DataFrame]] = None, base: Optional[BaseModel] = None -) -> DataFrame: - """Generate a dataframe from an NDB DynamicTable""" - if model is None: - model = model_from_dynamictable(group, base) - - items = {} - for col, _col_type in model.model_fields.items(): - if col not in group: - continue - idxname = col + "_index" - if idxname in group: - idx = group.get(idxname)[:] - data = group.get(col)[idx - 1] - else: - data = group.get(col)[:] - - # Handle typing inside of list - if isinstance(data[0], bytes): - data = data.astype("unicode") - if isinstance(data[0], str): - # lists and other compound data types can get flattened out to strings when stored - # so we try and literal eval and recover them - try: - eval_type = type(ast.literal_eval(data[0])) - except (ValueError, SyntaxError): - eval_type = str - - # if we've found one of those, get the data type within it. - if eval_type is not str: - eval_list = [] - for item in data.tolist(): - try: - eval_list.append(ast.literal_eval(item)) - except ValueError: - eval_list.append(None) - data = eval_list - elif isinstance(data[0], h5py.h5r.Reference): - data = [HDF5_Path(group[d].name) for d in data] - elif isinstance(data[0], tuple) and any( - [isinstance(d, h5py.h5r.Reference) for d in data[0]] - ): - # references stored inside a tuple, reference + location. - # dereference them!? 
- dset = group.get(col) - names = dset.dtype.names - if names is not None and names[0] == "idx_start" and names[1] == "count": - data = dereference_reference_vector(dset, data) - - else: - data = data.tolist() - - # After list, check if we need to put this thing inside of - # another class, as indicated by the enclosing model - - items[col] = data - - return model(hdf5_path=group.name, name=group.name.split("/")[-1], **items) +# +# def dynamictable_to_df( +# group: h5py.Group, model: Optional[Type[DataFrame]] = None, base: Optional[BaseModel] = None +# ) -> DataFrame: +# """Generate a dataframe from an NDB DynamicTable""" +# if model is None: +# model = model_from_dynamictable(group, base) +# +# items = {} +# for col, _col_type in model.model_fields.items(): +# if col not in group: +# continue +# idxname = col + "_index" +# if idxname in group: +# idx = group.get(idxname)[:] +# data = group.get(col)[idx - 1] +# else: +# data = group.get(col)[:] +# +# # Handle typing inside of list +# if isinstance(data[0], bytes): +# data = data.astype("unicode") +# if isinstance(data[0], str): +# # lists and other compound data types can get flattened out to strings when stored +# # so we try and literal eval and recover them +# try: +# eval_type = type(ast.literal_eval(data[0])) +# except (ValueError, SyntaxError): +# eval_type = str +# +# # if we've found one of those, get the data type within it. +# if eval_type is not str: +# eval_list = [] +# for item in data.tolist(): +# try: +# eval_list.append(ast.literal_eval(item)) +# except ValueError: +# eval_list.append(None) +# data = eval_list +# elif isinstance(data[0], h5py.h5r.Reference): +# data = [HDF5_Path(group[d].name) for d in data] +# elif isinstance(data[0], tuple) and any( +# [isinstance(d, h5py.h5r.Reference) for d in data[0]] +# ): +# # references stored inside a tuple, reference + location. +# # dereference them!? 
+# dset = group.get(col) +# names = dset.dtype.names +# if names is not None and names[0] == "idx_start" and names[1] == "count": +# data = dereference_reference_vector(dset, data) +# +# else: +# data = data.tolist() +# +# # After list, check if we need to put this thing inside of +# # another class, as indicated by the enclosing model +# +# items[col] = data +# +# return model(hdf5_path=group.name, name=group.name.split("/")[-1], **items) diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index 557e7db..8e1bd64 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -67,8 +67,9 @@ def units(request) -> Tuple[Units, list[np.ndarray], np.ndarray]: """ n_units = 24 + generator = np.random.default_rng() spike_times = [ - np.full(shape=np.random.randint(10, 50), fill_value=i, dtype=float) for i in range(n_units) + np.full(shape=generator.integers(10, 50), fill_value=i, dtype=float) for i in range(n_units) ] spike_idx = [] for i in range(n_units): @@ -141,6 +142,19 @@ def test_dynamictable_indexing(electrical_series): assert subsection.dtypes.values.tolist() == dtypes[0:3] +def test_dynamictable_region(electrical_series): + """ + A DynamicTableRegion column should be indexable to return rows of the table it references + """ + series, electrodes = electrical_series + + + def test_dynamictable_ragged_arrays(units): """ Should be able to index ragged arrays using an implicit _index column diff --git a/nwb_linkml/tests/test_providers/test_provider_schema.py b/nwb_linkml/tests/test_providers/test_provider_schema.py index e92e466..9da6296 100644 --- a/nwb_linkml/tests/test_providers/test_provider_schema.py +++ b/nwb_linkml/tests/test_providers/test_provider_schema.py @@ -4,8 +4,9 @@ from pathlib import Path from typing import Optional import pytest -from nptyping import Shape, UByte -from numpydantic import NDArray +from numpydantic import NDArray, Shape +import numpy as np + import nwb_linkml from nwb_linkml.maps.naming import version_module_case @@ -77,7 +78,7 @@ def test_linkml_build_from_yaml(tmp_output_dir): "comments": Optional[str], "data": "TimeSeriesData", "timestamps": "Optional", # __name__ just gets the first part of Optional[TimeSeriesTimestamps] - "control": Optional[NDArray[Shape["* num_times"], UByte]], + "control": Optional[NDArray[Shape["* num_times"], np.uint8]], }, ) ], diff --git a/pyproject.toml b/pyproject.toml index eae490f..b8723db 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,6 +58,10 @@ select = [ "D210", "D211", # emptiness "D419", + # perf + "PERF", + # numpy + "NPY", ] ignore = [ # annotations for *args and **kwargs diff --git a/scripts/generate_core.py b/scripts/generate_core.py index 6cbb83f..35faf43 100644 --- a/scripts/generate_core.py +++ b/scripts/generate_core.py @@ -3,6 +3,7 @@ import os import sys import traceback from pdb import post_mortem +import subprocess from argparse import ArgumentParser from pathlib import Path @@ -179,6 +180,8 @@ def generate_versions( with open(pydantic_path / "__init__.py", "w") as initfile: initfile.write(f"from .pydantic.core.{latest_version.name}.namespace import *") + subprocess.run(["black", "."]) + finally: if len(failed_versions) > 0: print("Failed Building Versions:") From a9909485a4b1511f87e4bd801550ccacc0031b6e Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 7 Aug 2024 02:03:04 -0700 Subject: [PATCH 29/61] my god it works but what have i done --- .../src/nwb_linkml/generators/pydantic.py | 2 +
nwb_linkml/src/nwb_linkml/includes/hdmf.py | 80 ++++++++++--- .../hdmf_common/v1_1_0/hdmf_common_table.py | 65 ++++++++--- .../hdmf_common/v1_1_2/hdmf_common_table.py | 65 ++++++++--- .../hdmf_common/v1_1_3/hdmf_common_table.py | 65 ++++++++--- .../hdmf_common/v1_2_0/hdmf_common_table.py | 65 ++++++++--- .../hdmf_common/v1_2_1/hdmf_common_table.py | 65 ++++++++--- .../hdmf_common/v1_3_0/hdmf_common_table.py | 65 ++++++++--- .../hdmf_common/v1_4_0/hdmf_common_table.py | 65 ++++++++--- .../hdmf_common/v1_5_0/hdmf_common_table.py | 65 ++++++++--- .../hdmf_common/v1_5_1/hdmf_common_table.py | 65 ++++++++--- .../hdmf_common/v1_6_0/hdmf_common_table.py | 65 ++++++++--- .../hdmf_common/v1_7_0/hdmf_common_table.py | 65 ++++++++--- .../hdmf_common/v1_8_0/hdmf_common_table.py | 65 ++++++++--- nwb_linkml/tests/test_includes/test_hdmf.py | 105 ++++++++++++++---- .../test_providers/test_provider_schema.py | 3 +- 16 files changed, 725 insertions(+), 245 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py index f8c8033..e1f07af 100644 --- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py +++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py @@ -250,6 +250,8 @@ class AfterGenerateClass: cls.cls.bases = ["VectorDataMixin"] elif cls.cls.name == "VectorIndex": cls.cls.bases = ["VectorIndexMixin"] + elif cls.cls.name == "DynamicTableRegion": + cls.cls.bases = ["DynamicTableRegionMixin", "VectorData"] return cls diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index d080d03..506f098 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -2,7 +2,18 @@ Special types for mimicking HDMF special case behavior """ -from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Tuple, Union, overload +from typing import ( + TYPE_CHECKING, + Any, + ClassVar, + Dict, + Iterable, + List, + Optional, + Tuple, + Union, + overload, +) import numpy as np from linkml.generators.pydanticgen.template import Import, Imports, ObjectImport @@ -69,7 +80,7 @@ class DynamicTableMixin(BaseModel): ]: ... @overload - def __getitem__(self, item: slice) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... 
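[Editor's aside, not part of the patch: a minimal usage sketch of the row-indexing behavior the hunk above adds to DynamicTableMixin — integer arrays are now accepted alongside ints, slices, and (row, column) tuples, which is what later lets a DynamicTableRegion pass a vector of row indices straight to its target table. `MyTable` and its columns are hypothetical stand-ins for a generated DynamicTable subclass.

    import numpy as np

    table = MyTable(  # hypothetical generated DynamicTable subclass
        colnames=["bias", "gain"],
        bias=[1.0, 2.0, 3.0],
        gain=[10, 20, 30],
    )

    table[0]                 # one row, as a one-row DataFrame
    table[0:2]               # a slice of rows, as a DataFrame
    table[np.array([0, 2])]  # an ndarray of row indices selects just those rows
    table[0, "gain"]         # (row, column) tuple selects a single cell
]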
def __getitem__( self, @@ -77,6 +88,7 @@ class DynamicTableMixin(BaseModel): str, int, slice, + "NDArray", Tuple[int, Union[int, str]], Tuple[Union[int, slice], ...], ], @@ -96,7 +108,7 @@ class DynamicTableMixin(BaseModel): """ if isinstance(item, str): return self._columns[item] - if isinstance(item, (int, slice)): + if isinstance(item, (int, slice, np.integer, np.ndarray)): return DataFrame.from_dict(self._slice_range(item)) elif isinstance(item, tuple): if len(item) != 2: @@ -107,7 +119,7 @@ class DynamicTableMixin(BaseModel): # all other cases are tuples of (rows, cols) rows, cols = item - if isinstance(cols, (int, slice)): + if isinstance(cols, (int, slice, np.integer)): cols = self.colnames[cols] if isinstance(rows, int) and isinstance(cols, str): @@ -120,7 +132,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: if cols is None: cols = self.colnames @@ -128,7 +140,11 @@ class DynamicTableMixin(BaseModel): cols = [cols] data = {} for k in cols: - val = self._columns[k][rows] + if isinstance(rows, np.ndarray): + val = [self._columns[k][i] for i in rows] + else: + val = self._columns[k][rows] + if isinstance(val, BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict @@ -339,22 +355,22 @@ class VectorIndexMixin(BaseModel): """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] return self.target.value[slice(start, end)] - def __getitem__(self, item: Union[int, slice]) -> Any: + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] - elif isinstance(self.target, VectorData): + else: if isinstance(item, int): return self._getitem_helper(item) + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self._getitem_helper(i) for i in item] else: - idx = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in idx] - else: - raise AttributeError(f"Could not index with {item}") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -387,13 +403,40 @@ class DynamicTableRegionMixin(BaseModel): Mixin to allow indexing references to regions of dynamictables """ - table: "DynamicTableMixin" + _index: Optional["VectorIndex"] = None - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - return self.table[item] + table: "DynamicTableMixin" + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + """ + Use ``value`` to index the table. 
Works analogously to ``VectorIndex`` despite + this being a subclass of ``VectorData`` + """ + if self._index: + if isinstance(item, int): + # index returns an array of indices, + # and indexing table with an array returns a list of rows + return self.table[self._index[item]] + elif isinstance(item, slice): + # index returns a list of arrays of indices, + # so we index table with an array to construct + # a list of lists of rows + return [self.table[idx] for idx in self._index[item]] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") + else: + if isinstance(item, int): + return self.table[self.value[item]] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self.table[self.value[i]] for i in item] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[key] = value + self.table[self.value[key]] = value DYNAMIC_TABLE_IMPORTS = Imports( @@ -405,8 +448,9 @@ DYNAMIC_TABLE_IMPORTS = Imports( module="typing", objects=[ ObjectImport(name="ClassVar"), - ObjectImport(name="overload"), + ObjectImport(name="Iterable"), ObjectImport(name="Tuple"), + ObjectImport(name="overload"), ], ), Import( diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index 8d3f9e9..d75a127 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from numpydantic import NDArray, Shape from pydantic import ( BaseModel, @@ -128,22 +128,22 @@ class VectorIndexMixin(BaseModel): """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] return self.target.value[slice(start, end)] - def __getitem__(self, item: Union[int, slice]) -> Any: + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] - elif isinstance(self.target, VectorData): + else: if isinstance(item, int): return self._getitem_helper(item) + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self._getitem_helper(i) for i in item] else: - idx = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in idx] - else: - raise AttributeError(f"Could not index with {item}") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -176,13 +176,40 @@ class DynamicTableRegionMixin(BaseModel): Mixin to allow indexing references to regions of dynamictables """ - table: "DynamicTableMixin" + _index: Optional["VectorIndex"] = None - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - return self.table[item] + table: "DynamicTableMixin" + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + """ + Use 
``value`` to index the table. Works analogously to ``VectorIndex`` despite + this being a subclass of ``VectorData`` + """ + if self._index: + if isinstance(item, int): + # index returns an array of indices, + # and indexing table with an array returns a list of rows + return self.table[self._index[item]] + elif isinstance(item, slice): + # index returns a list of arrays of indices, + # so we index table with an array to construct + # a list of lists of rows + return [self.table[idx] for idx in self._index[item]] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") + else: + if isinstance(item, int): + return self.table[self.value[item]] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self.table[self.value[i]] for i in item] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[key] = value + self.table[self.value[key]] = value class DynamicTableMixin(BaseModel): @@ -258,7 +285,7 @@ class DynamicTableMixin(BaseModel): """ if isinstance(item, str): return self._columns[item] - if isinstance(item, (int, slice)): + if isinstance(item, (int, slice, np.integer, np.ndarray)): return DataFrame.from_dict(self._slice_range(item)) elif isinstance(item, tuple): if len(item) != 2: @@ -269,7 +296,7 @@ class DynamicTableMixin(BaseModel): # all other cases are tuples of (rows, cols) rows, cols = item - if isinstance(cols, (int, slice)): + if isinstance(cols, (int, slice, np.integer)): cols = self.colnames[cols] if isinstance(rows, int) and isinstance(cols, str): @@ -282,7 +309,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: if cols is None: cols = self.colnames @@ -290,7 +317,11 @@ class DynamicTableMixin(BaseModel): cols = [cols] data = {} for k in cols: - val = self._columns[k][rows] + if isinstance(rows, np.ndarray): + val = [self._columns[k][i] for i in rows] + else: + val = self._columns[k][rows] + if isinstance(val, BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict @@ -515,7 +546,7 @@ class ElementIdentifiers(Data): ) -class DynamicTableRegion(VectorData): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 2be1fbe..3882294 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from numpydantic import NDArray, Shape from pydantic import ( BaseModel, @@ -128,22 +128,22 @@ class VectorIndexMixin(BaseModel): """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] return self.target.value[slice(start, end)] - def __getitem__(self, item: Union[int, slice]) -> Any: + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] - elif isinstance(self.target, VectorData): + else: if isinstance(item, int): return self._getitem_helper(item) + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self._getitem_helper(i) for i in item] else: - idx = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in idx] - else: - raise AttributeError(f"Could not index with {item}") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -176,13 +176,40 @@ class DynamicTableRegionMixin(BaseModel): Mixin to allow indexing references to regions of dynamictables """ - table: "DynamicTableMixin" + _index: Optional["VectorIndex"] = None - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - return self.table[item] + table: "DynamicTableMixin" + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + """ + Use ``value`` to index the table. 
Works analogously to ``VectorIndex`` despite + this being a subclass of ``VectorData`` + """ + if self._index: + if isinstance(item, int): + # index returns an array of indices, + # and indexing table with an array returns a list of rows + return self.table[self._index[item]] + elif isinstance(item, slice): + # index returns a list of arrays of indices, + # so we index table with an array to construct + # a list of lists of rows + return [self.table[idx] for idx in self._index[item]] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") + else: + if isinstance(item, int): + return self.table[self.value[item]] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self.table[self.value[i]] for i in item] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[key] = value + self.table[self.value[key]] = value class DynamicTableMixin(BaseModel): @@ -258,7 +285,7 @@ class DynamicTableMixin(BaseModel): """ if isinstance(item, str): return self._columns[item] - if isinstance(item, (int, slice)): + if isinstance(item, (int, slice, np.integer, np.ndarray)): return DataFrame.from_dict(self._slice_range(item)) elif isinstance(item, tuple): if len(item) != 2: @@ -269,7 +296,7 @@ class DynamicTableMixin(BaseModel): # all other cases are tuples of (rows, cols) rows, cols = item - if isinstance(cols, (int, slice)): + if isinstance(cols, (int, slice, np.integer)): cols = self.colnames[cols] if isinstance(rows, int) and isinstance(cols, str): @@ -282,7 +309,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: if cols is None: cols = self.colnames @@ -290,7 +317,11 @@ class DynamicTableMixin(BaseModel): cols = [cols] data = {} for k in cols: - val = self._columns[k][rows] + if isinstance(rows, np.ndarray): + val = [self._columns[k][i] for i in rows] + else: + val = self._columns[k][rows] + if isinstance(val, BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict @@ -515,7 +546,7 @@ class ElementIdentifiers(Data): ) -class DynamicTableRegion(VectorData): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index 6cf1bf2..8df75da 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from numpydantic import NDArray, Shape from pydantic import ( BaseModel, @@ -128,22 +128,22 @@ class VectorIndexMixin(BaseModel): """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] return self.target.value[slice(start, end)] - def __getitem__(self, item: Union[int, slice]) -> Any: + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] - elif isinstance(self.target, VectorData): + else: if isinstance(item, int): return self._getitem_helper(item) + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self._getitem_helper(i) for i in item] else: - idx = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in idx] - else: - raise AttributeError(f"Could not index with {item}") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -176,13 +176,40 @@ class DynamicTableRegionMixin(BaseModel): Mixin to allow indexing references to regions of dynamictables """ - table: "DynamicTableMixin" + _index: Optional["VectorIndex"] = None - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - return self.table[item] + table: "DynamicTableMixin" + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + """ + Use ``value`` to index the table. 
Works analogously to ``VectorIndex`` despite + this being a subclass of ``VectorData`` + """ + if self._index: + if isinstance(item, int): + # index returns an array of indices, + # and indexing table with an array returns a list of rows + return self.table[self._index[item]] + elif isinstance(item, slice): + # index returns a list of arrays of indices, + # so we index table with an array to construct + # a list of lists of rows + return [self.table[idx] for idx in self._index[item]] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") + else: + if isinstance(item, int): + return self.table[self.value[item]] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self.table[self.value[i]] for i in item] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[key] = value + self.table[self.value[key]] = value class DynamicTableMixin(BaseModel): @@ -258,7 +285,7 @@ class DynamicTableMixin(BaseModel): """ if isinstance(item, str): return self._columns[item] - if isinstance(item, (int, slice)): + if isinstance(item, (int, slice, np.integer, np.ndarray)): return DataFrame.from_dict(self._slice_range(item)) elif isinstance(item, tuple): if len(item) != 2: @@ -269,7 +296,7 @@ class DynamicTableMixin(BaseModel): # all other cases are tuples of (rows, cols) rows, cols = item - if isinstance(cols, (int, slice)): + if isinstance(cols, (int, slice, np.integer)): cols = self.colnames[cols] if isinstance(rows, int) and isinstance(cols, str): @@ -282,7 +309,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: if cols is None: cols = self.colnames @@ -290,7 +317,11 @@ class DynamicTableMixin(BaseModel): cols = [cols] data = {} for k in cols: - val = self._columns[k][rows] + if isinstance(rows, np.ndarray): + val = [self._columns[k][i] for i in rows] + else: + val = self._columns[k][rows] + if isinstance(val, BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict @@ -526,7 +557,7 @@ class ElementIdentifiers(Data): ) -class DynamicTableRegion(VectorData): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py index 8e79364..0823281 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -6,7 +6,7 @@ import re import sys from ...hdmf_common.v1_2_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from numpydantic import NDArray, Shape from pydantic import ( BaseModel, @@ -129,22 +129,22 @@ class VectorIndexMixin(BaseModel): """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] return self.target.value[slice(start, end)] - def __getitem__(self, item: Union[int, slice]) -> Any: + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] - elif isinstance(self.target, VectorData): + else: if isinstance(item, int): return self._getitem_helper(item) + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self._getitem_helper(i) for i in item] else: - idx = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in idx] - else: - raise AttributeError(f"Could not index with {item}") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -177,13 +177,40 @@ class DynamicTableRegionMixin(BaseModel): Mixin to allow indexing references to regions of dynamictables """ - table: "DynamicTableMixin" + _index: Optional["VectorIndex"] = None - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - return self.table[item] + table: "DynamicTableMixin" + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + """ + Use ``value`` to index the table. 
Works analogously to ``VectorIndex`` despite + this being a subclass of ``VectorData`` + """ + if self._index: + if isinstance(item, int): + # index returns an array of indices, + # and indexing table with an array returns a list of rows + return self.table[self._index[item]] + elif isinstance(item, slice): + # index returns a list of arrays of indices, + # so we index table with an array to construct + # a list of lists of rows + return [self.table[idx] for idx in self._index[item]] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") + else: + if isinstance(item, int): + return self.table[self.value[item]] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self.table[self.value[i]] for i in item] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[key] = value + self.table[self.value[key]] = value class DynamicTableMixin(BaseModel): @@ -259,7 +286,7 @@ class DynamicTableMixin(BaseModel): """ if isinstance(item, str): return self._columns[item] - if isinstance(item, (int, slice)): + if isinstance(item, (int, slice, np.integer, np.ndarray)): return DataFrame.from_dict(self._slice_range(item)) elif isinstance(item, tuple): if len(item) != 2: @@ -270,7 +297,7 @@ class DynamicTableMixin(BaseModel): # all other cases are tuples of (rows, cols) rows, cols = item - if isinstance(cols, (int, slice)): + if isinstance(cols, (int, slice, np.integer)): cols = self.colnames[cols] if isinstance(rows, int) and isinstance(cols, str): @@ -283,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: if cols is None: cols = self.colnames @@ -291,7 +318,11 @@ class DynamicTableMixin(BaseModel): cols = [cols] data = {} for k in cols: - val = self._columns[k][rows] + if isinstance(rows, np.ndarray): + val = [self._columns[k][i] for i in rows] + else: + val = self._columns[k][rows] + if isinstance(val, BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict @@ -506,7 +537,7 @@ class ElementIdentifiers(Data): ) -class DynamicTableRegion(VectorData): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py index b78ba5c..88405cc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -6,7 +6,7 @@ import re import sys from ...hdmf_common.v1_2_1.hdmf_common_base import Data, Container from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from numpydantic import NDArray, Shape from pydantic import ( BaseModel, @@ -129,22 +129,22 @@ class VectorIndexMixin(BaseModel): """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] return self.target.value[slice(start, end)] - def __getitem__(self, item: Union[int, slice]) -> Any: + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] - elif isinstance(self.target, VectorData): + else: if isinstance(item, int): return self._getitem_helper(item) + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self._getitem_helper(i) for i in item] else: - idx = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in idx] - else: - raise AttributeError(f"Could not index with {item}") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -177,13 +177,40 @@ class DynamicTableRegionMixin(BaseModel): Mixin to allow indexing references to regions of dynamictables """ - table: "DynamicTableMixin" + _index: Optional["VectorIndex"] = None - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - return self.table[item] + table: "DynamicTableMixin" + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + """ + Use ``value`` to index the table. 
Works analogously to ``VectorIndex`` despite + this being a subclass of ``VectorData`` + """ + if self._index: + if isinstance(item, int): + # index returns an array of indices, + # and indexing table with an array returns a list of rows + return self.table[self._index[item]] + elif isinstance(item, slice): + # index returns a list of arrays of indices, + # so we index table with an array to construct + # a list of lists of rows + return [self.table[idx] for idx in self._index[item]] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") + else: + if isinstance(item, int): + return self.table[self.value[item]] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self.table[self.value[i]] for i in item] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[key] = value + self.table[self.value[key]] = value class DynamicTableMixin(BaseModel): @@ -259,7 +286,7 @@ class DynamicTableMixin(BaseModel): """ if isinstance(item, str): return self._columns[item] - if isinstance(item, (int, slice)): + if isinstance(item, (int, slice, np.integer, np.ndarray)): return DataFrame.from_dict(self._slice_range(item)) elif isinstance(item, tuple): if len(item) != 2: @@ -270,7 +297,7 @@ class DynamicTableMixin(BaseModel): # all other cases are tuples of (rows, cols) rows, cols = item - if isinstance(cols, (int, slice)): + if isinstance(cols, (int, slice, np.integer)): cols = self.colnames[cols] if isinstance(rows, int) and isinstance(cols, str): @@ -283,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: if cols is None: cols = self.colnames @@ -291,7 +318,11 @@ class DynamicTableMixin(BaseModel): cols = [cols] data = {} for k in cols: - val = self._columns[k][rows] + if isinstance(rows, np.ndarray): + val = [self._columns[k][i] for i in rows] + else: + val = self._columns[k][rows] + if isinstance(val, BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict @@ -506,7 +537,7 @@ class ElementIdentifiers(Data): ) -class DynamicTableRegion(VectorData): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
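In the un-indexed branch of `DynamicTableRegionMixin.__getitem__`, `value` is an array of row indices into the target table, so `region[i]` resolves to `table[value[i]]` rather than indexing the table directly. A toy sketch of that resolution, mirroring the reversed `value` array the test fixture uses (names here are illustrative):

```python
# `value` maps region positions to table rows; note the reversal.
value = [4, 3, 2, 1, 0]
table = [f"row {i}" for i in range(5)]

def region_getitem(item):
    if isinstance(item, int):
        return table[value[item]]
    if isinstance(item, slice):
        return [table[value[i]] for i in range(*item.indices(len(value)))]
    raise ValueError(f"Don't know how to index with {item}")

assert region_getitem(0) == "row 4"
assert region_getitem(slice(0, 3)) == ["row 4", "row 3", "row 2"]
```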
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py index b17a853..545f0e9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -6,7 +6,7 @@ import re import sys from ...hdmf_common.v1_3_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from numpydantic import NDArray, Shape from pydantic import ( BaseModel, @@ -129,22 +129,22 @@ class VectorIndexMixin(BaseModel): """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] return self.target.value[slice(start, end)] - def __getitem__(self, item: Union[int, slice]) -> Any: + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] - elif isinstance(self.target, VectorData): + else: if isinstance(item, int): return self._getitem_helper(item) + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self._getitem_helper(i) for i in item] else: - idx = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in idx] - else: - raise AttributeError(f"Could not index with {item}") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -177,13 +177,40 @@ class DynamicTableRegionMixin(BaseModel): Mixin to allow indexing references to regions of dynamictables """ - table: "DynamicTableMixin" + _index: Optional["VectorIndex"] = None - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - return self.table[item] + table: "DynamicTableMixin" + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + """ + Use ``value`` to index the table. 
Works analogously to ``VectorIndex`` despite + this being a subclass of ``VectorData`` + """ + if self._index: + if isinstance(item, int): + # index returns an array of indices, + # and indexing table with an array returns a list of rows + return self.table[self._index[item]] + elif isinstance(item, slice): + # index returns a list of arrays of indices, + # so we index table with an array to construct + # a list of lists of rows + return [self.table[idx] for idx in self._index[item]] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") + else: + if isinstance(item, int): + return self.table[self.value[item]] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self.table[self.value[i]] for i in item] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[key] = value + self.table[self.value[key]] = value class DynamicTableMixin(BaseModel): @@ -259,7 +286,7 @@ class DynamicTableMixin(BaseModel): """ if isinstance(item, str): return self._columns[item] - if isinstance(item, (int, slice)): + if isinstance(item, (int, slice, np.integer, np.ndarray)): return DataFrame.from_dict(self._slice_range(item)) elif isinstance(item, tuple): if len(item) != 2: @@ -270,7 +297,7 @@ class DynamicTableMixin(BaseModel): # all other cases are tuples of (rows, cols) rows, cols = item - if isinstance(cols, (int, slice)): + if isinstance(cols, (int, slice, np.integer)): cols = self.colnames[cols] if isinstance(rows, int) and isinstance(cols, str): @@ -283,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: if cols is None: cols = self.colnames @@ -291,7 +318,11 @@ class DynamicTableMixin(BaseModel): cols = [cols] data = {} for k in cols: - val = self._columns[k][rows] + if isinstance(rows, np.ndarray): + val = [self._columns[k][i] for i in rows] + else: + val = self._columns[k][rows] + if isinstance(val, BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict @@ -506,7 +537,7 @@ class ElementIdentifiers(Data): ) -class DynamicTableRegion(VectorData): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
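When a `DynamicTableRegion` carries a `_index`, the lookup goes through two hops: the `VectorIndex` slices the region's `value` into groups of table-row indices, and each group is then resolved against the table in one shot (indexing a table with an array returns several rows). A sketch of the first hop, using the same numbers that `test_dynamictable_region_ragged` constructs later in this series:

```python
import numpy as np

value = np.array([0, 1, 2, 1, 2, 3, 2, 3, 4])  # table-row indices, grouped
idx = np.array([3, 6, 9])                      # end offset of each group

def index_getitem(i: int) -> np.ndarray:
    start = 0 if i == 0 else idx[i - 1]
    return value[start:idx[i]]

# region[1] resolves to table rows 1, 2, 3 -- an overlapping window
assert index_getitem(1).tolist() == [1, 2, 3]
```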
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index 536d5af..c3fb548 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -6,7 +6,7 @@ import re import sys from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from numpydantic import NDArray, Shape from pydantic import ( BaseModel, @@ -129,22 +129,22 @@ class VectorIndexMixin(BaseModel): """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] return self.target.value[slice(start, end)] - def __getitem__(self, item: Union[int, slice]) -> Any: + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] - elif isinstance(self.target, VectorData): + else: if isinstance(item, int): return self._getitem_helper(item) + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self._getitem_helper(i) for i in item] else: - idx = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in idx] - else: - raise AttributeError(f"Could not index with {item}") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -177,13 +177,40 @@ class DynamicTableRegionMixin(BaseModel): Mixin to allow indexing references to regions of dynamictables """ - table: "DynamicTableMixin" + _index: Optional["VectorIndex"] = None - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - return self.table[item] + table: "DynamicTableMixin" + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + """ + Use ``value`` to index the table. 
Works analogously to ``VectorIndex`` despite + this being a subclass of ``VectorData`` + """ + if self._index: + if isinstance(item, int): + # index returns an array of indices, + # and indexing table with an array returns a list of rows + return self.table[self._index[item]] + elif isinstance(item, slice): + # index returns a list of arrays of indices, + # so we index table with an array to construct + # a list of lists of rows + return [self.table[idx] for idx in self._index[item]] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") + else: + if isinstance(item, int): + return self.table[self.value[item]] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self.table[self.value[i]] for i in item] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[key] = value + self.table[self.value[key]] = value class DynamicTableMixin(BaseModel): @@ -259,7 +286,7 @@ class DynamicTableMixin(BaseModel): """ if isinstance(item, str): return self._columns[item] - if isinstance(item, (int, slice)): + if isinstance(item, (int, slice, np.integer, np.ndarray)): return DataFrame.from_dict(self._slice_range(item)) elif isinstance(item, tuple): if len(item) != 2: @@ -270,7 +297,7 @@ class DynamicTableMixin(BaseModel): # all other cases are tuples of (rows, cols) rows, cols = item - if isinstance(cols, (int, slice)): + if isinstance(cols, (int, slice, np.integer)): cols = self.colnames[cols] if isinstance(rows, int) and isinstance(cols, str): @@ -283,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: if cols is None: cols = self.colnames @@ -291,7 +318,11 @@ class DynamicTableMixin(BaseModel): cols = [cols] data = {} for k in cols: - val = self._columns[k][rows] + if isinstance(rows, np.ndarray): + val = [self._columns[k][i] for i in rows] + else: + val = self._columns[k][rows] + if isinstance(val, BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict @@ -506,7 +537,7 @@ class ElementIdentifiers(Data): ) -class DynamicTableRegion(VectorData): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
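The `range(*item.indices(len(self.value)))` idiom that recurs throughout these `__getitem__` implementations is standard-library slice normalization: `slice.indices(length)` clamps start, stop, and step to the container's length, so negative and open-ended slices become a concrete run of integers. For example:

```python
# slice.indices(length) -> (start, stop, step), clamped to the length
s = slice(None, -1, 2)
assert s.indices(6) == (0, 5, 2)
assert list(range(*s.indices(6))) == [0, 2, 4]
assert list(range(*slice(1, 3).indices(6))) == [1, 2]
```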
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 80d1d6d..5dda9ab 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -6,7 +6,7 @@ import re import sys from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from pydantic import ( BaseModel, ConfigDict, @@ -129,22 +129,22 @@ class VectorIndexMixin(BaseModel): """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] return self.target.value[slice(start, end)] - def __getitem__(self, item: Union[int, slice]) -> Any: + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] - elif isinstance(self.target, VectorData): + else: if isinstance(item, int): return self._getitem_helper(item) + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self._getitem_helper(i) for i in item] else: - idx = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in idx] - else: - raise AttributeError(f"Could not index with {item}") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -177,13 +177,40 @@ class DynamicTableRegionMixin(BaseModel): Mixin to allow indexing references to regions of dynamictables """ - table: "DynamicTableMixin" + _index: Optional["VectorIndex"] = None - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - return self.table[item] + table: "DynamicTableMixin" + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + """ + Use ``value`` to index the table. 
Works analogously to ``VectorIndex`` despite + this being a subclass of ``VectorData`` + """ + if self._index: + if isinstance(item, int): + # index returns an array of indices, + # and indexing table with an array returns a list of rows + return self.table[self._index[item]] + elif isinstance(item, slice): + # index returns a list of arrays of indices, + # so we index table with an array to construct + # a list of lists of rows + return [self.table[idx] for idx in self._index[item]] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") + else: + if isinstance(item, int): + return self.table[self.value[item]] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self.table[self.value[i]] for i in item] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[key] = value + self.table[self.value[key]] = value class DynamicTableMixin(BaseModel): @@ -259,7 +286,7 @@ class DynamicTableMixin(BaseModel): """ if isinstance(item, str): return self._columns[item] - if isinstance(item, (int, slice)): + if isinstance(item, (int, slice, np.integer, np.ndarray)): return DataFrame.from_dict(self._slice_range(item)) elif isinstance(item, tuple): if len(item) != 2: @@ -270,7 +297,7 @@ class DynamicTableMixin(BaseModel): # all other cases are tuples of (rows, cols) rows, cols = item - if isinstance(cols, (int, slice)): + if isinstance(cols, (int, slice, np.integer)): cols = self.colnames[cols] if isinstance(rows, int) and isinstance(cols, str): @@ -283,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: if cols is None: cols = self.colnames @@ -291,7 +318,11 @@ class DynamicTableMixin(BaseModel): cols = [cols] data = {} for k in cols: - val = self._columns[k][rows] + if isinstance(rows, np.ndarray): + val = [self._columns[k][i] for i in rows] + else: + val = self._columns[k][rows] + if isinstance(val, BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict @@ -506,7 +537,7 @@ class ElementIdentifiers(Data): ) -class DynamicTableRegion(VectorData): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
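The `np.ndarray` branch added to `_slice_range` exists because ragged columns are stored as plain Python lists of per-row arrays, and lists do not support numpy-style fancy indexing; an element-wise loop is the lowest common denominator that works for both lists and arrays. A small illustration (assumed data, not from the patch):

```python
import numpy as np

column = [np.array([0.0]), np.array([1.0, 1.0]), np.array([2.0, 2.0, 2.0])]
rows = np.array([2, 0])

# column[rows] raises TypeError for a list; the loop works either way.
selected = [column[i] for i in rows]
assert [v.tolist() for v in selected] == [[2.0, 2.0, 2.0], [0.0]]
```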
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 7ddc7ec..910b294 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -6,7 +6,7 @@ import re import sys from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from pydantic import ( BaseModel, ConfigDict, @@ -129,22 +129,22 @@ class VectorIndexMixin(BaseModel): """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] return self.target.value[slice(start, end)] - def __getitem__(self, item: Union[int, slice]) -> Any: + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] - elif isinstance(self.target, VectorData): + else: if isinstance(item, int): return self._getitem_helper(item) + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self._getitem_helper(i) for i in item] else: - idx = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in idx] - else: - raise AttributeError(f"Could not index with {item}") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -177,13 +177,40 @@ class DynamicTableRegionMixin(BaseModel): Mixin to allow indexing references to regions of dynamictables """ - table: "DynamicTableMixin" + _index: Optional["VectorIndex"] = None - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - return self.table[item] + table: "DynamicTableMixin" + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + """ + Use ``value`` to index the table. 
Works analogously to ``VectorIndex`` despite + this being a subclass of ``VectorData`` + """ + if self._index: + if isinstance(item, int): + # index returns an array of indices, + # and indexing table with an array returns a list of rows + return self.table[self._index[item]] + elif isinstance(item, slice): + # index returns a list of arrays of indices, + # so we index table with an array to construct + # a list of lists of rows + return [self.table[idx] for idx in self._index[item]] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") + else: + if isinstance(item, int): + return self.table[self.value[item]] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self.table[self.value[i]] for i in item] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[key] = value + self.table[self.value[key]] = value class DynamicTableMixin(BaseModel): @@ -259,7 +286,7 @@ class DynamicTableMixin(BaseModel): """ if isinstance(item, str): return self._columns[item] - if isinstance(item, (int, slice)): + if isinstance(item, (int, slice, np.integer, np.ndarray)): return DataFrame.from_dict(self._slice_range(item)) elif isinstance(item, tuple): if len(item) != 2: @@ -270,7 +297,7 @@ class DynamicTableMixin(BaseModel): # all other cases are tuples of (rows, cols) rows, cols = item - if isinstance(cols, (int, slice)): + if isinstance(cols, (int, slice, np.integer)): cols = self.colnames[cols] if isinstance(rows, int) and isinstance(cols, str): @@ -283,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: if cols is None: cols = self.colnames @@ -291,7 +318,11 @@ class DynamicTableMixin(BaseModel): cols = [cols] data = {} for k in cols: - val = self._columns[k][rows] + if isinstance(rows, np.ndarray): + val = [self._columns[k][i] for i in rows] + else: + val = self._columns[k][rows] + if isinstance(val, BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict @@ -506,7 +537,7 @@ class ElementIdentifiers(Data): ) -class DynamicTableRegion(VectorData): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
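The base-class reordering at the bottom of each of these diffs, `class DynamicTableRegion(DynamicTableRegionMixin, VectorData)`, matters because Python resolves attributes left to right along the MRO: listing the mixin first lets its table-resolving `__getitem__` shadow the plain `VectorData` one. A toy illustration:

```python
class Base:
    def __getitem__(self, item):
        return "base"

class Mixin:
    def __getitem__(self, item):
        return "mixin"

class Combined(Mixin, Base):
    pass

assert Combined()[0] == "mixin"
assert [c.__name__ for c in Combined.__mro__[:3]] == ["Combined", "Mixin", "Base"]
```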
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index a8315f3..3df1e78 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -6,7 +6,7 @@ import re import sys from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from pydantic import ( BaseModel, ConfigDict, @@ -129,22 +129,22 @@ class VectorIndexMixin(BaseModel): """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] return self.target.value[slice(start, end)] - def __getitem__(self, item: Union[int, slice]) -> Any: + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] - elif isinstance(self.target, VectorData): + else: if isinstance(item, int): return self._getitem_helper(item) + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self._getitem_helper(i) for i in item] else: - idx = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in idx] - else: - raise AttributeError(f"Could not index with {item}") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -177,13 +177,40 @@ class DynamicTableRegionMixin(BaseModel): Mixin to allow indexing references to regions of dynamictables """ - table: "DynamicTableMixin" + _index: Optional["VectorIndex"] = None - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - return self.table[item] + table: "DynamicTableMixin" + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + """ + Use ``value`` to index the table. 
Works analogously to ``VectorIndex`` despite + this being a subclass of ``VectorData`` + """ + if self._index: + if isinstance(item, int): + # index returns an array of indices, + # and indexing table with an array returns a list of rows + return self.table[self._index[item]] + elif isinstance(item, slice): + # index returns a list of arrays of indices, + # so we index table with an array to construct + # a list of lists of rows + return [self.table[idx] for idx in self._index[item]] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") + else: + if isinstance(item, int): + return self.table[self.value[item]] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self.table[self.value[i]] for i in item] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[key] = value + self.table[self.value[key]] = value class DynamicTableMixin(BaseModel): @@ -259,7 +286,7 @@ class DynamicTableMixin(BaseModel): """ if isinstance(item, str): return self._columns[item] - if isinstance(item, (int, slice)): + if isinstance(item, (int, slice, np.integer, np.ndarray)): return DataFrame.from_dict(self._slice_range(item)) elif isinstance(item, tuple): if len(item) != 2: @@ -270,7 +297,7 @@ class DynamicTableMixin(BaseModel): # all other cases are tuples of (rows, cols) rows, cols = item - if isinstance(cols, (int, slice)): + if isinstance(cols, (int, slice, np.integer)): cols = self.colnames[cols] if isinstance(rows, int) and isinstance(cols, str): @@ -283,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: if cols is None: cols = self.colnames @@ -291,7 +318,11 @@ class DynamicTableMixin(BaseModel): cols = [cols] data = {} for k in cols: - val = self._columns[k][rows] + if isinstance(rows, np.ndarray): + val = [self._columns[k][i] for i in rows] + else: + val = self._columns[k][rows] + if isinstance(val, BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict @@ -506,7 +537,7 @@ class ElementIdentifiers(Data): ) -class DynamicTableRegion(VectorData): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index 91e25e7..3e438ce 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -6,7 +6,7 @@ import re import sys from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from pydantic import ( BaseModel, ConfigDict, @@ -129,22 +129,22 @@ class VectorIndexMixin(BaseModel): """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] return self.target.value[slice(start, end)] - def __getitem__(self, item: Union[int, slice]) -> Any: + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] - elif isinstance(self.target, VectorData): + else: if isinstance(item, int): return self._getitem_helper(item) + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self._getitem_helper(i) for i in item] else: - idx = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in idx] - else: - raise AttributeError(f"Could not index with {item}") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -177,13 +177,40 @@ class DynamicTableRegionMixin(BaseModel): Mixin to allow indexing references to regions of dynamictables """ - table: "DynamicTableMixin" + _index: Optional["VectorIndex"] = None - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - return self.table[item] + table: "DynamicTableMixin" + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + """ + Use ``value`` to index the table. 
Works analogously to ``VectorIndex`` despite + this being a subclass of ``VectorData`` + """ + if self._index: + if isinstance(item, int): + # index returns an array of indices, + # and indexing table with an array returns a list of rows + return self.table[self._index[item]] + elif isinstance(item, slice): + # index returns a list of arrays of indices, + # so we index table with an array to construct + # a list of lists of rows + return [self.table[idx] for idx in self._index[item]] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") + else: + if isinstance(item, int): + return self.table[self.value[item]] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self.table[self.value[i]] for i in item] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[key] = value + self.table[self.value[key]] = value class DynamicTableMixin(BaseModel): @@ -259,7 +286,7 @@ class DynamicTableMixin(BaseModel): """ if isinstance(item, str): return self._columns[item] - if isinstance(item, (int, slice)): + if isinstance(item, (int, slice, np.integer, np.ndarray)): return DataFrame.from_dict(self._slice_range(item)) elif isinstance(item, tuple): if len(item) != 2: @@ -270,7 +297,7 @@ class DynamicTableMixin(BaseModel): # all other cases are tuples of (rows, cols) rows, cols = item - if isinstance(cols, (int, slice)): + if isinstance(cols, (int, slice, np.integer)): cols = self.colnames[cols] if isinstance(rows, int) and isinstance(cols, str): @@ -283,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: if cols is None: cols = self.colnames @@ -291,7 +318,11 @@ class DynamicTableMixin(BaseModel): cols = [cols] data = {} for k in cols: - val = self._columns[k][rows] + if isinstance(rows, np.ndarray): + val = [self._columns[k][i] for i in rows] + else: + val = self._columns[k][rows] + if isinstance(val, BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict @@ -506,7 +537,7 @@ class ElementIdentifiers(Data): ) -class DynamicTableRegion(VectorData): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index 8e9d681..c016f61 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -6,7 +6,7 @@ import re import sys from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container from pandas import DataFrame, Series -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Tuple +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from pydantic import ( BaseModel, ConfigDict, @@ -129,22 +129,22 @@ class VectorIndexMixin(BaseModel): """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ - start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] return self.target.value[slice(start, end)] - def __getitem__(self, item: Union[int, slice]) -> Any: + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] - elif isinstance(self.target, VectorData): + else: if isinstance(item, int): return self._getitem_helper(item) + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self._getitem_helper(i) for i in item] else: - idx = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in idx] - else: - raise AttributeError(f"Could not index with {item}") + raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: if self._index: @@ -177,13 +177,40 @@ class DynamicTableRegionMixin(BaseModel): Mixin to allow indexing references to regions of dynamictables """ - table: "DynamicTableMixin" + _index: Optional["VectorIndex"] = None - def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: - return self.table[item] + table: "DynamicTableMixin" + value: Optional[NDArray] = None + + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + """ + Use ``value`` to index the table. 
Works analogously to ``VectorIndex`` despite + this being a subclass of ``VectorData`` + """ + if self._index: + if isinstance(item, int): + # index returns an array of indices, + # and indexing table with an array returns a list of rows + return self.table[self._index[item]] + elif isinstance(item, slice): + # index returns a list of arrays of indices, + # so we index table with an array to construct + # a list of lists of rows + return [self.table[idx] for idx in self._index[item]] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") + else: + if isinstance(item, int): + return self.table[self.value[item]] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.value))) + return [self.table[self.value[i]] for i in item] + else: + raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[key] = value + self.table[self.value[key]] = value class DynamicTableMixin(BaseModel): @@ -259,7 +286,7 @@ class DynamicTableMixin(BaseModel): """ if isinstance(item, str): return self._columns[item] - if isinstance(item, (int, slice)): + if isinstance(item, (int, slice, np.integer, np.ndarray)): return DataFrame.from_dict(self._slice_range(item)) elif isinstance(item, tuple): if len(item) != 2: @@ -270,7 +297,7 @@ class DynamicTableMixin(BaseModel): # all other cases are tuples of (rows, cols) rows, cols = item - if isinstance(cols, (int, slice)): + if isinstance(cols, (int, slice, np.integer)): cols = self.colnames[cols] if isinstance(rows, int) and isinstance(cols, str): @@ -283,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") def _slice_range( - self, rows: Union[int, slice], cols: Optional[Union[str, List[str]]] = None + self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: if cols is None: cols = self.colnames @@ -291,7 +318,11 @@ class DynamicTableMixin(BaseModel): cols = [cols] data = {} for k in cols: - val = self._columns[k][rows] + if isinstance(rows, np.ndarray): + val = [self._columns[k][i] for i in rows] + else: + val = self._columns[k][rows] + if isinstance(val, BaseModel): # special case where pandas will unpack a pydantic model # into {n_fields} rows, rather than keeping it in a dict @@ -506,7 +537,7 @@ class ElementIdentifiers(Data): ) -class DynamicTableRegion(VectorData): +class DynamicTableRegion(DynamicTableRegionMixin, VectorData): """ DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. 
""" diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index 8e1bd64..87e1c88 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -6,11 +6,13 @@ import pytest # FIXME: Make this just be the output of the provider by patching into import machinery from nwb_linkml.models.pydantic.core.v2_7_0.namespace import ( Device, + DynamicTable, DynamicTableRegion, ElectricalSeries, ElectrodeGroup, ExtracellularEphysElectrodes, Units, + VectorIndex, ) @@ -49,7 +51,10 @@ def electrical_series() -> Tuple["ElectricalSeries", "ExtracellularEphysElectrod electrical_series = ElectricalSeries( name="my recording!", electrodes=DynamicTableRegion( - table=electrodes, value=np.arange(0, n_electrodes), name="electrodes", description="hey" + table=electrodes, + value=np.arange(n_electrodes - 1, -1, step=-1), + name="electrodes", + description="hey", ), timestamps=timestamps, data=data, @@ -57,16 +62,7 @@ def electrical_series() -> Tuple["ElectricalSeries", "ExtracellularEphysElectrod return electrical_series, electrodes -@pytest.fixture(params=[True, False]) -def units(request) -> Tuple[Units, list[np.ndarray], np.ndarray]: - """ - Test case for units - - Parameterized by extra_column because pandas likes to pivot dataframes - to long when there is only one column and it's not len() == 1 - """ - - n_units = 24 +def _ragged_array(n_units: int) -> tuple[list[np.ndarray], np.ndarray]: generator = np.random.default_rng() spike_times = [ np.full(shape=generator.integers(10, 50), fill_value=i, dtype=float) for i in range(n_units) @@ -78,6 +74,18 @@ def units(request) -> Tuple[Units, list[np.ndarray], np.ndarray]: else: spike_idx.append(len(spike_times[i]) + spike_idx[i - 1]) spike_idx = np.array(spike_idx) + return spike_times, spike_idx + + +@pytest.fixture(params=[True, False]) +def units(request) -> Tuple[Units, list[np.ndarray], np.ndarray]: + """ + Test case for units + + Parameterized by extra_column because pandas likes to pivot dataframes + to long when there is only one column and it's not len() == 1 + """ + spike_times, spike_idx = _ragged_array(24) spike_times_flat = np.concatenate(spike_times) @@ -87,7 +95,7 @@ def units(request) -> Tuple[Units, list[np.ndarray], np.ndarray]: "spike_times_index": spike_idx, } if request.param: - kwargs["extra_column"] = ["hey!"] * n_units + kwargs["extra_column"] = ["hey!"] * 24 units = Units(**kwargs) return units, spike_times, spike_idx @@ -142,20 +150,75 @@ def test_dynamictable_indexing(electrical_series): assert subsection.dtypes.values.tolist() == dtypes[0:3] -def test_dynamictable_region(electrical_series): +def test_dynamictable_region_basic(electrical_series): """ - Dynamictableregion should - Args: - electrical_series: - - Returns: - + DynamicTableRegion should be able to refer to a row or rows of another table + itself as a column within a table """ series, electrodes = electrical_series - + row = series.electrodes[0] + # check that we correctly got the 4th row instead of the 0th row, + # since the indexed table was constructed with inverted indexes because it's a test, ya dummy. + # we will only vaguely check the basic functionality here bc + # a) the indexing behavior of the indexed objects is tested above, and + # b) every other object in the chain is strictly validated, + # so we assume if we got a right shaped df that it is the correct one. 
+ # feel free to @ me when i am wrong about this + assert row.id == 4 + assert row.shape == (1, 7) + # and we should still be preserving the model that is the contents of the cell of this row + # so this is a dataframe row with a column "group" that contains an array of ElectrodeGroup + # objects and that's as far as we are going to chase the recursion in this basic indexing test + # ElectrodeGroup is strictly validating so an instance check is all we need. + assert isinstance(row.group.values[0], ElectrodeGroup) + + # getting a list of table rows is actually correct behavior here because + # this list of table rows is actually the cell of another table + rows = series.electrodes[0:3] + assert all([row.id == idx for row, idx in zip(rows, [4, 3, 2])]) -def test_dynamictable_ragged_arrays(units): +def test_dynamictable_region_ragged(): + """ + Dynamictables can also have indexes so that they are ragged arrays of column rows + """ + spike_times, spike_idx = _ragged_array(24) + spike_times_flat = np.concatenate(spike_times) + + # construct a secondary index that selects overlapping segments of the first table + value = np.array([0, 1, 2, 1, 2, 3, 2, 3, 4]) + idx = np.array([3, 6, 9]) + + table = DynamicTable( + name="table", + description="a table what else would it be", + id=np.arange(len(spike_idx)), + timeseries=spike_times, + timeseries_index=spike_idx, + ) + region = DynamicTableRegion( + name="dynamictableregion", + description="this field should be optional", + table=table, + value=value, + ) + index = VectorIndex(name="index", description="hgggggggjjjj", target=region, value=idx) + region._index = index + rows = region[1] + # i guess this is right? + # the region should be a set of three rows of the table, with a ragged array column timeseries + # like... + # + # id timeseries + # 0 1 [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, ... + # 1 2 [2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, ... + # 2 3 [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, ... + assert rows.shape(3, 2) + assert all(rows.id == [1, 2, 3]) + assert all([all(row[1].timeseries == i) for i, row in zip([1, 2, 3], rows.iterrows())]) + + +def test_dynamictable_ragged(units): """ Should be able to index ragged arrays using an implicit _index column diff --git a/nwb_linkml/tests/test_providers/test_provider_schema.py b/nwb_linkml/tests/test_providers/test_provider_schema.py index 9da6296..a455e29 100644 --- a/nwb_linkml/tests/test_providers/test_provider_schema.py +++ b/nwb_linkml/tests/test_providers/test_provider_schema.py @@ -3,10 +3,9 @@ import sys from pathlib import Path from typing import Optional +import numpy as np import pytest from numpydantic import NDArray, Shape -import numpy as np - import nwb_linkml from nwb_linkml.maps.naming import version_module_case From 9e7c53344dbda15dacb8d3375f0f7cdb41dbb48e Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 7 Aug 2024 02:28:06 -0700 Subject: [PATCH 30/61] oop --- nwb_linkml/tests/test_includes/test_hdmf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index 87e1c88..2b5d47f 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -213,7 +213,7 @@ def test_dynamictable_region_ragged(): # 0 1 [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, ... # 1 2 [2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, ... # 2 3 [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, ... 
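As an aside on the `_ragged_array` helper factored out above: its running-total loop over row lengths is equivalent to a cumulative sum, so an `np.cumsum` formulation (an assumed alternative, not what the patch does) produces the same end-offset index:

```python
import numpy as np

spike_times = [np.full(n, i, dtype=float) for i, n in enumerate([3, 2, 4])]
spike_idx = np.cumsum([len(st) for st in spike_times])
assert spike_idx.tolist() == [3, 5, 9]  # same cumulative end offsets
```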
- assert rows.shape(3, 2) + assert rows.shape == (3, 2) assert all(rows.id == [1, 2, 3]) assert all([all(row[1].timeseries == i) for i, row in zip([1, 2, 3], rows.iterrows())]) From 362965daf5fb6536e7cac5541f42db58c58c1378 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 7 Aug 2024 18:56:01 -0700 Subject: [PATCH 31/61] correct test for equality for series --- nwb_linkml/tests/test_includes/test_hdmf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index 2b5d47f..6ade3a2 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -164,7 +164,7 @@ def test_dynamictable_region_basic(electrical_series): # b) every other object in the chain is strictly validated, # so we assume if we got a right shaped df that it is the correct one. # feel free to @ me when i am wrong about this - assert row.id == 4 + assert all(row.id == 4) assert row.shape == (1, 7) # and we should still be preserving the model that is the contents of the cell of this row # so this is a dataframe row with a column "group" that contains an array of ElectrodeGroup From cebb21993d3e943d1c5a089a859becd75f2d2407 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 7 Aug 2024 19:22:29 -0700 Subject: [PATCH 32/61] actually fix indexing --- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 16 ++++---- .../hdmf_common/v1_1_0/hdmf_common_table.py | 19 +++++----- .../hdmf_common/v1_1_2/hdmf_common_table.py | 19 +++++----- .../hdmf_common/v1_1_3/hdmf_common_table.py | 19 +++++----- .../hdmf_common/v1_2_0/hdmf_common_table.py | 19 +++++----- .../hdmf_common/v1_2_1/hdmf_common_table.py | 19 +++++----- .../hdmf_common/v1_3_0/hdmf_common_table.py | 19 +++++----- .../hdmf_common/v1_4_0/hdmf_common_table.py | 19 +++++----- .../hdmf_common/v1_5_0/hdmf_common_table.py | 19 +++++----- .../hdmf_common/v1_5_1/hdmf_common_table.py | 19 +++++----- .../hdmf_common/v1_6_0/hdmf_common_table.py | 19 +++++----- .../hdmf_common/v1_7_0/hdmf_common_table.py | 19 +++++----- .../hdmf_common/v1_8_0/hdmf_common_table.py | 19 +++++----- nwb_linkml/tests/test_includes/test_hdmf.py | 38 +++++++++---------- 14 files changed, 134 insertions(+), 148 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 506f098..1e0c3f7 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -109,7 +109,7 @@ class DynamicTableMixin(BaseModel): if isinstance(item, str): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): - return DataFrame.from_dict(self._slice_range(item)) + data = self._slice_range(item) elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -127,10 +127,12 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) else: raise ValueError(f"Unsure how to get item with key {item}") + # cast to DF + return DataFrame(data) + def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: @@ -145,14 +147,10 @@ class DynamicTableMixin(BaseModel): else: val = self._columns[k][rows] - if isinstance(val, BaseModel): - # special case where pandas will unpack a pydantic model - # into {n_fields} rows, rather than keeping it in a dict - val = Series([val]) - elif 
isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: - # special case where we are returning a row in a ragged array, - # same as above - prevent pandas pivoting to long + # scalars need to be wrapped in series for pandas + if not isinstance(rows, (Iterable, slice)): val = Series([val]) + data[k] = val return data diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index d75a127..41ca9bd 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -258,7 +258,7 @@ class DynamicTableMixin(BaseModel): ]: ... @overload - def __getitem__(self, item: slice) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... def __getitem__( self, @@ -266,6 +266,7 @@ class DynamicTableMixin(BaseModel): str, int, slice, + "NDArray", Tuple[int, Union[int, str]], Tuple[Union[int, slice], ...], ], @@ -286,7 +287,7 @@ class DynamicTableMixin(BaseModel): if isinstance(item, str): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): - return DataFrame.from_dict(self._slice_range(item)) + data = self._slice_range(item) elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -304,10 +305,12 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) else: raise ValueError(f"Unsure how to get item with key {item}") + # cast to DF + return DataFrame(data) + def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: @@ -322,14 +325,10 @@ class DynamicTableMixin(BaseModel): else: val = self._columns[k][rows] - if isinstance(val, BaseModel): - # special case where pandas will unpack a pydantic model - # into {n_fields} rows, rather than keeping it in a dict - val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: - # special case where we are returning a row in a ragged array, - # same as above - prevent pandas pivoting to long + # scalars need to be wrapped in series for pandas + if not isinstance(rows, (Iterable, slice)): val = Series([val]) + data[k] = val return data diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 3882294..927d9c0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -258,7 +258,7 @@ class DynamicTableMixin(BaseModel): ]: ... @overload - def __getitem__(self, item: slice) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... 
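The `Series([val])` wrapping that this patch settles on handles the pandas footgun the older special cases were chasing one at a time: building a `DataFrame` from a dict of bare scalars raises, and a bare ragged row (an array) gets exploded into one output row per element. Wrapping every single-row value in a length-1 `Series` keeps a one-row frame with the array intact in its cell. A sketch of the difference (illustrative data):

```python
import numpy as np
import pandas as pd

row = {"id": 4, "spikes": np.array([1.0, 1.0, 1.0])}

# Naive construction broadcasts the scalar along the array: 3 rows, not 1.
assert pd.DataFrame(row).shape == (3, 2)

# Length-1 Series per column keeps one row, with the array in a single cell.
df = pd.DataFrame({k: pd.Series([v]) for k, v in row.items()})
assert df.shape == (1, 2)
```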
def __getitem__( self, @@ -266,6 +266,7 @@ class DynamicTableMixin(BaseModel): str, int, slice, + "NDArray", Tuple[int, Union[int, str]], Tuple[Union[int, slice], ...], ], @@ -286,7 +287,7 @@ class DynamicTableMixin(BaseModel): if isinstance(item, str): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): - return DataFrame.from_dict(self._slice_range(item)) + data = self._slice_range(item) elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -304,10 +305,12 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) else: raise ValueError(f"Unsure how to get item with key {item}") + # cast to DF + return DataFrame(data) + def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: @@ -322,14 +325,10 @@ class DynamicTableMixin(BaseModel): else: val = self._columns[k][rows] - if isinstance(val, BaseModel): - # special case where pandas will unpack a pydantic model - # into {n_fields} rows, rather than keeping it in a dict - val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: - # special case where we are returning a row in a ragged array, - # same as above - prevent pandas pivoting to long + # scalars need to be wrapped in series for pandas + if not isinstance(rows, (Iterable, slice)): val = Series([val]) + data[k] = val return data diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index 8df75da..01324a9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -258,7 +258,7 @@ class DynamicTableMixin(BaseModel): ]: ... @overload - def __getitem__(self, item: slice) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... 
def __getitem__( self, @@ -266,6 +266,7 @@ class DynamicTableMixin(BaseModel): str, int, slice, + "NDArray", Tuple[int, Union[int, str]], Tuple[Union[int, slice], ...], ], @@ -286,7 +287,7 @@ class DynamicTableMixin(BaseModel): if isinstance(item, str): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): - return DataFrame.from_dict(self._slice_range(item)) + data = self._slice_range(item) elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -304,10 +305,12 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) else: raise ValueError(f"Unsure how to get item with key {item}") + # cast to DF + return DataFrame(data) + def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: @@ -322,14 +325,10 @@ class DynamicTableMixin(BaseModel): else: val = self._columns[k][rows] - if isinstance(val, BaseModel): - # special case where pandas will unpack a pydantic model - # into {n_fields} rows, rather than keeping it in a dict - val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: - # special case where we are returning a row in a ragged array, - # same as above - prevent pandas pivoting to long + # scalars need to be wrapped in series for pandas + if not isinstance(rows, (Iterable, slice)): val = Series([val]) + data[k] = val return data diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py index 0823281..f9f4450 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -259,7 +259,7 @@ class DynamicTableMixin(BaseModel): ]: ... @overload - def __getitem__(self, item: slice) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... 
def __getitem__( self, @@ -267,6 +267,7 @@ class DynamicTableMixin(BaseModel): str, int, slice, + "NDArray", Tuple[int, Union[int, str]], Tuple[Union[int, slice], ...], ], @@ -287,7 +288,7 @@ class DynamicTableMixin(BaseModel): if isinstance(item, str): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): - return DataFrame.from_dict(self._slice_range(item)) + data = self._slice_range(item) elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -305,10 +306,12 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) else: raise ValueError(f"Unsure how to get item with key {item}") + # cast to DF + return DataFrame(data) + def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: @@ -323,14 +326,10 @@ class DynamicTableMixin(BaseModel): else: val = self._columns[k][rows] - if isinstance(val, BaseModel): - # special case where pandas will unpack a pydantic model - # into {n_fields} rows, rather than keeping it in a dict - val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: - # special case where we are returning a row in a ragged array, - # same as above - prevent pandas pivoting to long + # scalars need to be wrapped in series for pandas + if not isinstance(rows, (Iterable, slice)): val = Series([val]) + data[k] = val return data diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py index 88405cc..e297747 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -259,7 +259,7 @@ class DynamicTableMixin(BaseModel): ]: ... @overload - def __getitem__(self, item: slice) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... 
def __getitem__( self, @@ -267,6 +267,7 @@ class DynamicTableMixin(BaseModel): str, int, slice, + "NDArray", Tuple[int, Union[int, str]], Tuple[Union[int, slice], ...], ], @@ -287,7 +288,7 @@ class DynamicTableMixin(BaseModel): if isinstance(item, str): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): - return DataFrame.from_dict(self._slice_range(item)) + data = self._slice_range(item) elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -305,10 +306,12 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) else: raise ValueError(f"Unsure how to get item with key {item}") + # cast to DF + return DataFrame(data) + def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: @@ -323,14 +326,10 @@ class DynamicTableMixin(BaseModel): else: val = self._columns[k][rows] - if isinstance(val, BaseModel): - # special case where pandas will unpack a pydantic model - # into {n_fields} rows, rather than keeping it in a dict - val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: - # special case where we are returning a row in a ragged array, - # same as above - prevent pandas pivoting to long + # scalars need to be wrapped in series for pandas + if not isinstance(rows, (Iterable, slice)): val = Series([val]) + data[k] = val return data diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py index 545f0e9..50eeb23 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -259,7 +259,7 @@ class DynamicTableMixin(BaseModel): ]: ... @overload - def __getitem__(self, item: slice) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... 
def __getitem__( self, @@ -267,6 +267,7 @@ class DynamicTableMixin(BaseModel): str, int, slice, + "NDArray", Tuple[int, Union[int, str]], Tuple[Union[int, slice], ...], ], @@ -287,7 +288,7 @@ class DynamicTableMixin(BaseModel): if isinstance(item, str): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): - return DataFrame.from_dict(self._slice_range(item)) + data = self._slice_range(item) elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -305,10 +306,12 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) else: raise ValueError(f"Unsure how to get item with key {item}") + # cast to DF + return DataFrame(data) + def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: @@ -323,14 +326,10 @@ class DynamicTableMixin(BaseModel): else: val = self._columns[k][rows] - if isinstance(val, BaseModel): - # special case where pandas will unpack a pydantic model - # into {n_fields} rows, rather than keeping it in a dict - val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: - # special case where we are returning a row in a ragged array, - # same as above - prevent pandas pivoting to long + # scalars need to be wrapped in series for pandas + if not isinstance(rows, (Iterable, slice)): val = Series([val]) + data[k] = val return data diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index c3fb548..affd5dc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -259,7 +259,7 @@ class DynamicTableMixin(BaseModel): ]: ... @overload - def __getitem__(self, item: slice) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... 
def __getitem__( self, @@ -267,6 +267,7 @@ class DynamicTableMixin(BaseModel): str, int, slice, + "NDArray", Tuple[int, Union[int, str]], Tuple[Union[int, slice], ...], ], @@ -287,7 +288,7 @@ class DynamicTableMixin(BaseModel): if isinstance(item, str): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): - return DataFrame.from_dict(self._slice_range(item)) + data = self._slice_range(item) elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -305,10 +306,12 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) else: raise ValueError(f"Unsure how to get item with key {item}") + # cast to DF + return DataFrame(data) + def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: @@ -323,14 +326,10 @@ class DynamicTableMixin(BaseModel): else: val = self._columns[k][rows] - if isinstance(val, BaseModel): - # special case where pandas will unpack a pydantic model - # into {n_fields} rows, rather than keeping it in a dict - val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: - # special case where we are returning a row in a ragged array, - # same as above - prevent pandas pivoting to long + # scalars need to be wrapped in series for pandas + if not isinstance(rows, (Iterable, slice)): val = Series([val]) + data[k] = val return data diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 5dda9ab..5b99f2c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -259,7 +259,7 @@ class DynamicTableMixin(BaseModel): ]: ... @overload - def __getitem__(self, item: slice) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... 
def __getitem__( self, @@ -267,6 +267,7 @@ class DynamicTableMixin(BaseModel): str, int, slice, + "NDArray", Tuple[int, Union[int, str]], Tuple[Union[int, slice], ...], ], @@ -287,7 +288,7 @@ class DynamicTableMixin(BaseModel): if isinstance(item, str): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): - return DataFrame.from_dict(self._slice_range(item)) + data = self._slice_range(item) elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -305,10 +306,12 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) else: raise ValueError(f"Unsure how to get item with key {item}") + # cast to DF + return DataFrame(data) + def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: @@ -323,14 +326,10 @@ class DynamicTableMixin(BaseModel): else: val = self._columns[k][rows] - if isinstance(val, BaseModel): - # special case where pandas will unpack a pydantic model - # into {n_fields} rows, rather than keeping it in a dict - val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: - # special case where we are returning a row in a ragged array, - # same as above - prevent pandas pivoting to long + # scalars need to be wrapped in series for pandas + if not isinstance(rows, (Iterable, slice)): val = Series([val]) + data[k] = val return data diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 910b294..2eb4675 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -259,7 +259,7 @@ class DynamicTableMixin(BaseModel): ]: ... @overload - def __getitem__(self, item: slice) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... 
def __getitem__( self, @@ -267,6 +267,7 @@ class DynamicTableMixin(BaseModel): str, int, slice, + "NDArray", Tuple[int, Union[int, str]], Tuple[Union[int, slice], ...], ], @@ -287,7 +288,7 @@ class DynamicTableMixin(BaseModel): if isinstance(item, str): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): - return DataFrame.from_dict(self._slice_range(item)) + data = self._slice_range(item) elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -305,10 +306,12 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) else: raise ValueError(f"Unsure how to get item with key {item}") + # cast to DF + return DataFrame(data) + def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: @@ -323,14 +326,10 @@ class DynamicTableMixin(BaseModel): else: val = self._columns[k][rows] - if isinstance(val, BaseModel): - # special case where pandas will unpack a pydantic model - # into {n_fields} rows, rather than keeping it in a dict - val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: - # special case where we are returning a row in a ragged array, - # same as above - prevent pandas pivoting to long + # scalars need to be wrapped in series for pandas + if not isinstance(rows, (Iterable, slice)): val = Series([val]) + data[k] = val return data diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index 3df1e78..d578633 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -259,7 +259,7 @@ class DynamicTableMixin(BaseModel): ]: ... @overload - def __getitem__(self, item: slice) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... 
def __getitem__( self, @@ -267,6 +267,7 @@ class DynamicTableMixin(BaseModel): str, int, slice, + "NDArray", Tuple[int, Union[int, str]], Tuple[Union[int, slice], ...], ], @@ -287,7 +288,7 @@ class DynamicTableMixin(BaseModel): if isinstance(item, str): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): - return DataFrame.from_dict(self._slice_range(item)) + data = self._slice_range(item) elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -305,10 +306,12 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) else: raise ValueError(f"Unsure how to get item with key {item}") + # cast to DF + return DataFrame(data) + def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: @@ -323,14 +326,10 @@ class DynamicTableMixin(BaseModel): else: val = self._columns[k][rows] - if isinstance(val, BaseModel): - # special case where pandas will unpack a pydantic model - # into {n_fields} rows, rather than keeping it in a dict - val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: - # special case where we are returning a row in a ragged array, - # same as above - prevent pandas pivoting to long + # scalars need to be wrapped in series for pandas + if not isinstance(rows, (Iterable, slice)): val = Series([val]) + data[k] = val return data diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index 3e438ce..1d6e89f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -259,7 +259,7 @@ class DynamicTableMixin(BaseModel): ]: ... @overload - def __getitem__(self, item: slice) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... 
def __getitem__( self, @@ -267,6 +267,7 @@ class DynamicTableMixin(BaseModel): str, int, slice, + "NDArray", Tuple[int, Union[int, str]], Tuple[Union[int, slice], ...], ], @@ -287,7 +288,7 @@ class DynamicTableMixin(BaseModel): if isinstance(item, str): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): - return DataFrame.from_dict(self._slice_range(item)) + data = self._slice_range(item) elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -305,10 +306,12 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) - return DataFrame.from_dict(data) else: raise ValueError(f"Unsure how to get item with key {item}") + # cast to DF + return DataFrame(data) + def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None ) -> Dict[str, Union[list, "NDArray", "VectorData"]]: @@ -323,14 +326,10 @@ class DynamicTableMixin(BaseModel): else: val = self._columns[k][rows] - if isinstance(val, BaseModel): - # special case where pandas will unpack a pydantic model - # into {n_fields} rows, rather than keeping it in a dict - val = Series([val]) - elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1: - # special case where we are returning a row in a ragged array, - # same as above - prevent pandas pivoting to long + # scalars need to be wrapped in series for pandas + if not isinstance(rows, (Iterable, slice)): val = Series([val]) + data[k] = val return data diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index c016f61..2c1798b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -259,7 +259,7 @@ class DynamicTableMixin(BaseModel): ]: ... @overload - def __getitem__(self, item: slice) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... 
def __getitem__(
         self,
@@ -267,6 +267,7 @@ class DynamicTableMixin(BaseModel):
             str,
             int,
             slice,
+            "NDArray",
             Tuple[int, Union[int, str]],
             Tuple[Union[int, slice], ...],
         ],
@@ -287,7 +288,7 @@ class DynamicTableMixin(BaseModel):
         if isinstance(item, str):
             return self._columns[item]
         if isinstance(item, (int, slice, np.integer, np.ndarray)):
-            return DataFrame.from_dict(self._slice_range(item))
+            data = self._slice_range(item)
         elif isinstance(item, tuple):
             if len(item) != 2:
                 raise ValueError(
@@ -305,10 +306,12 @@ class DynamicTableMixin(BaseModel):
                 return self._columns[cols][rows]

             data = self._slice_range(rows, cols)
-            return DataFrame.from_dict(data)
         else:
             raise ValueError(f"Unsure how to get item with key {item}")

+        # cast to DF
+        return DataFrame(data)
+
     def _slice_range(
         self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None
     ) -> Dict[str, Union[list, "NDArray", "VectorData"]]:
@@ -323,14 +326,10 @@ class DynamicTableMixin(BaseModel):
             else:
                 val = self._columns[k][rows]

-            if isinstance(val, BaseModel):
-                # special case where pandas will unpack a pydantic model
-                # into {n_fields} rows, rather than keeping it in a dict
-                val = Series([val])
-            elif isinstance(rows, int) and hasattr(val, "shape") and val.shape and len(val) > 1:
-                # special case where we are returning a row in a ragged array,
-                # same as above - prevent pandas pivoting to long
+            # scalars need to be wrapped in series for pandas
+            if not isinstance(rows, (Iterable, slice)):
                 val = Series([val])
+
             data[k] = val
         return data

diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py
index 6ade3a2..2d07d2d 100644
--- a/nwb_linkml/tests/test_includes/test_hdmf.py
+++ b/nwb_linkml/tests/test_includes/test_hdmf.py
@@ -150,6 +150,24 @@ def test_dynamictable_indexing(electrical_series):
     assert subsection.dtypes.values.tolist() == dtypes[0:3]


+def test_dynamictable_ragged(units):
+    """
+    Should be able to index ragged arrays using an implicit _index column
+
+    Also tests:
+    - passing arrays directly instead of wrapping in vectordata/index specifically,
+      if the models in the fixture instantiate then this works
+    """
+    units, spike_times, spike_idx = units
+
+    # ensure we don't pivot to long when indexing
+    assert units[0].shape[0] == 1
+    # check that we got the indexing boundaries correct
+    # (and that we are forwarding attr calls to the dataframe by accessing shape)
+    for i in range(units.shape[0]):
+        assert np.all(units.iloc[i, 0] == spike_times[i])
+
+
 def test_dynamictable_region_basic(electrical_series):
     """
     DynamicTableRegion should be able to refer to a row or rows of another table
@@ -175,7 +193,7 @@ def test_dynamictable_region_basic(electrical_series):
     # getting a list of table rows is actually correct behavior here because
     # this list of table rows is actually the cell of another table
     rows = series.electrodes[0:3]
-    assert all([row.id == idx for row, idx in zip(rows, [4, 3, 2])])
+    assert all([all(row.id == idx) for row, idx in zip(rows, [4, 3, 2])])


 def test_dynamictable_region_ragged():
@@ -218,24 +236,6 @@ def test_dynamictable_region_ragged():
     assert all([all(row[1].timeseries == i) for i, row in zip([1, 2, 3], rows.iterrows())])


-def test_dynamictable_ragged(units):
-    """
-    Should be able to index ragged arrays using an implicit _index column
-
-    Also tests:
-    - passing arrays directly instead of wrapping in vectordata/index specifically,
-      if the models in the fixture instantiate then this works
-    """
-    units, spike_times, spike_idx = units
-
-    
# ensure we don't pivot to long when indexing - assert units[0].shape[0] == 1 - # check that we got the indexing boundaries corrunect - # (and that we are forwarding attr calls to the dataframe by accessing shape - for i in range(units.shape[0]): - assert np.all(units.iloc[i, 0] == spike_times[i]) - - def test_dynamictable_append_column(): pass From 92d28baedda7d2030e9a57e6a6b2df1c0b2b017f Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 7 Aug 2024 20:23:18 -0700 Subject: [PATCH 33/61] coercion for extra columns passed as arrays --- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 40 ++++++++++++++++--- .../hdmf_common/v1_1_0/hdmf_common_table.py | 40 ++++++++++++++++--- .../hdmf_common/v1_1_2/hdmf_common_table.py | 40 ++++++++++++++++--- .../hdmf_common/v1_1_3/hdmf_common_table.py | 40 ++++++++++++++++--- .../hdmf_common/v1_2_0/hdmf_common_table.py | 40 ++++++++++++++++--- .../hdmf_common/v1_2_1/hdmf_common_table.py | 40 ++++++++++++++++--- .../hdmf_common/v1_3_0/hdmf_common_table.py | 40 ++++++++++++++++--- .../hdmf_common/v1_4_0/hdmf_common_table.py | 40 ++++++++++++++++--- .../hdmf_common/v1_5_0/hdmf_common_table.py | 40 ++++++++++++++++--- .../hdmf_common/v1_5_1/hdmf_common_table.py | 40 ++++++++++++++++--- .../hdmf_common/v1_6_0/hdmf_common_table.py | 40 ++++++++++++++++--- .../hdmf_common/v1_7_0/hdmf_common_table.py | 40 ++++++++++++++++--- .../hdmf_common/v1_8_0/hdmf_common_table.py | 40 ++++++++++++++++--- nwb_linkml/tests/test_includes/test_hdmf.py | 9 ++++- 14 files changed, 463 insertions(+), 66 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 1e0c3f7..c34bf9a 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -227,6 +227,32 @@ class DynamicTableMixin(BaseModel): model["colnames"].extend(colnames) return model + @model_validator(mode="after") + def cast_extra_columns(self): + """ + If extra columns are passed as just lists or arrays, cast to VectorData + before we resolve targets for VectorData and VectorIndex pairs. + + See :meth:`.cast_specified_columns` for handling columns in the class specification + """ + # if columns are not in the specification, cast to a generic VectorData + for key, val in self.__pydantic_extra__.items(): + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + self.__pydantic_extra__[key] = VectorIndex( + name=key, description="", value=val + ) + else: + self.__pydantic_extra__[key] = VectorData( + name=key, description="", value=val + ) + except ValidationError as e: + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e + return self + @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": """ @@ -263,11 +289,15 @@ class DynamicTableMixin(BaseModel): @field_validator("*", mode="wrap") @classmethod - def cast_columns( + def cast_specified_columns( cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo ) -> Any: """ - If columns are supplied as arrays, try casting them to the type before validating + If columns *in* the model specification are supplied as arrays, + try casting them to the type before validating. 
+ + Columns that are not in the spec are handled separately in + :meth:`.cast_extra_columns` """ try: return handler(val) @@ -361,7 +391,7 @@ class VectorIndexMixin(BaseModel): if self.target is None: return self.value[item] else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self._getitem_helper(item) elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -412,7 +442,7 @@ class DynamicTableRegionMixin(BaseModel): this being a subclass of ``VectorData`` """ if self._index: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): # index returns an array of indices, # and indexing table with an array returns a list of rows return self.table[self._index[item]] @@ -424,7 +454,7 @@ class DynamicTableRegionMixin(BaseModel): else: raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index 41ca9bd..1c07ed3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -136,7 +136,7 @@ class VectorIndexMixin(BaseModel): if self.target is None: return self.value[item] else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self._getitem_helper(item) elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -187,7 +187,7 @@ class DynamicTableRegionMixin(BaseModel): this being a subclass of ``VectorData`` """ if self._index: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): # index returns an array of indices, # and indexing table with an array returns a list of rows return self.table[self._index[item]] @@ -199,7 +199,7 @@ class DynamicTableRegionMixin(BaseModel): else: raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -405,6 +405,32 @@ class DynamicTableMixin(BaseModel): model["colnames"].extend(colnames) return model + @model_validator(mode="after") + def cast_extra_columns(self): + """ + If extra columns are passed as just lists or arrays, cast to VectorData + before we resolve targets for VectorData and VectorIndex pairs. 
+ + See :meth:`.cast_specified_columns` for handling columns in the class specification + """ + # if columns are not in the specification, cast to a generic VectorData + for key, val in self.__pydantic_extra__.items(): + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + self.__pydantic_extra__[key] = VectorIndex( + name=key, description="", value=val + ) + else: + self.__pydantic_extra__[key] = VectorData( + name=key, description="", value=val + ) + except ValidationError as e: + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e + return self + @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": """ @@ -441,11 +467,15 @@ class DynamicTableMixin(BaseModel): @field_validator("*", mode="wrap") @classmethod - def cast_columns( + def cast_specified_columns( cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo ) -> Any: """ - If columns are supplied as arrays, try casting them to the type before validating + If columns *in* the model specification are supplied as arrays, + try casting them to the type before validating. + + Columns that are not in the spec are handled separately in + :meth:`.cast_extra_columns` """ try: return handler(val) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 927d9c0..36a79bb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -136,7 +136,7 @@ class VectorIndexMixin(BaseModel): if self.target is None: return self.value[item] else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self._getitem_helper(item) elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -187,7 +187,7 @@ class DynamicTableRegionMixin(BaseModel): this being a subclass of ``VectorData`` """ if self._index: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): # index returns an array of indices, # and indexing table with an array returns a list of rows return self.table[self._index[item]] @@ -199,7 +199,7 @@ class DynamicTableRegionMixin(BaseModel): else: raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -405,6 +405,32 @@ class DynamicTableMixin(BaseModel): model["colnames"].extend(colnames) return model + @model_validator(mode="after") + def cast_extra_columns(self): + """ + If extra columns are passed as just lists or arrays, cast to VectorData + before we resolve targets for VectorData and VectorIndex pairs. 
+ + See :meth:`.cast_specified_columns` for handling columns in the class specification + """ + # if columns are not in the specification, cast to a generic VectorData + for key, val in self.__pydantic_extra__.items(): + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + self.__pydantic_extra__[key] = VectorIndex( + name=key, description="", value=val + ) + else: + self.__pydantic_extra__[key] = VectorData( + name=key, description="", value=val + ) + except ValidationError as e: + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e + return self + @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": """ @@ -441,11 +467,15 @@ class DynamicTableMixin(BaseModel): @field_validator("*", mode="wrap") @classmethod - def cast_columns( + def cast_specified_columns( cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo ) -> Any: """ - If columns are supplied as arrays, try casting them to the type before validating + If columns *in* the model specification are supplied as arrays, + try casting them to the type before validating. + + Columns that are not in the spec are handled separately in + :meth:`.cast_extra_columns` """ try: return handler(val) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index 01324a9..a5477d8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -136,7 +136,7 @@ class VectorIndexMixin(BaseModel): if self.target is None: return self.value[item] else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self._getitem_helper(item) elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -187,7 +187,7 @@ class DynamicTableRegionMixin(BaseModel): this being a subclass of ``VectorData`` """ if self._index: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): # index returns an array of indices, # and indexing table with an array returns a list of rows return self.table[self._index[item]] @@ -199,7 +199,7 @@ class DynamicTableRegionMixin(BaseModel): else: raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -405,6 +405,32 @@ class DynamicTableMixin(BaseModel): model["colnames"].extend(colnames) return model + @model_validator(mode="after") + def cast_extra_columns(self): + """ + If extra columns are passed as just lists or arrays, cast to VectorData + before we resolve targets for VectorData and VectorIndex pairs. 
+ + See :meth:`.cast_specified_columns` for handling columns in the class specification + """ + # if columns are not in the specification, cast to a generic VectorData + for key, val in self.__pydantic_extra__.items(): + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + self.__pydantic_extra__[key] = VectorIndex( + name=key, description="", value=val + ) + else: + self.__pydantic_extra__[key] = VectorData( + name=key, description="", value=val + ) + except ValidationError as e: + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e + return self + @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": """ @@ -441,11 +467,15 @@ class DynamicTableMixin(BaseModel): @field_validator("*", mode="wrap") @classmethod - def cast_columns( + def cast_specified_columns( cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo ) -> Any: """ - If columns are supplied as arrays, try casting them to the type before validating + If columns *in* the model specification are supplied as arrays, + try casting them to the type before validating. + + Columns that are not in the spec are handled separately in + :meth:`.cast_extra_columns` """ try: return handler(val) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py index f9f4450..bd03453 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -137,7 +137,7 @@ class VectorIndexMixin(BaseModel): if self.target is None: return self.value[item] else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self._getitem_helper(item) elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -188,7 +188,7 @@ class DynamicTableRegionMixin(BaseModel): this being a subclass of ``VectorData`` """ if self._index: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): # index returns an array of indices, # and indexing table with an array returns a list of rows return self.table[self._index[item]] @@ -200,7 +200,7 @@ class DynamicTableRegionMixin(BaseModel): else: raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -406,6 +406,32 @@ class DynamicTableMixin(BaseModel): model["colnames"].extend(colnames) return model + @model_validator(mode="after") + def cast_extra_columns(self): + """ + If extra columns are passed as just lists or arrays, cast to VectorData + before we resolve targets for VectorData and VectorIndex pairs. 
+ + See :meth:`.cast_specified_columns` for handling columns in the class specification + """ + # if columns are not in the specification, cast to a generic VectorData + for key, val in self.__pydantic_extra__.items(): + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + self.__pydantic_extra__[key] = VectorIndex( + name=key, description="", value=val + ) + else: + self.__pydantic_extra__[key] = VectorData( + name=key, description="", value=val + ) + except ValidationError as e: + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e + return self + @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": """ @@ -442,11 +468,15 @@ class DynamicTableMixin(BaseModel): @field_validator("*", mode="wrap") @classmethod - def cast_columns( + def cast_specified_columns( cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo ) -> Any: """ - If columns are supplied as arrays, try casting them to the type before validating + If columns *in* the model specification are supplied as arrays, + try casting them to the type before validating. + + Columns that are not in the spec are handled separately in + :meth:`.cast_extra_columns` """ try: return handler(val) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py index e297747..82c84bf 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -137,7 +137,7 @@ class VectorIndexMixin(BaseModel): if self.target is None: return self.value[item] else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self._getitem_helper(item) elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -188,7 +188,7 @@ class DynamicTableRegionMixin(BaseModel): this being a subclass of ``VectorData`` """ if self._index: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): # index returns an array of indices, # and indexing table with an array returns a list of rows return self.table[self._index[item]] @@ -200,7 +200,7 @@ class DynamicTableRegionMixin(BaseModel): else: raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -406,6 +406,32 @@ class DynamicTableMixin(BaseModel): model["colnames"].extend(colnames) return model + @model_validator(mode="after") + def cast_extra_columns(self): + """ + If extra columns are passed as just lists or arrays, cast to VectorData + before we resolve targets for VectorData and VectorIndex pairs. 
+ + See :meth:`.cast_specified_columns` for handling columns in the class specification + """ + # if columns are not in the specification, cast to a generic VectorData + for key, val in self.__pydantic_extra__.items(): + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + self.__pydantic_extra__[key] = VectorIndex( + name=key, description="", value=val + ) + else: + self.__pydantic_extra__[key] = VectorData( + name=key, description="", value=val + ) + except ValidationError as e: + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e + return self + @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": """ @@ -442,11 +468,15 @@ class DynamicTableMixin(BaseModel): @field_validator("*", mode="wrap") @classmethod - def cast_columns( + def cast_specified_columns( cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo ) -> Any: """ - If columns are supplied as arrays, try casting them to the type before validating + If columns *in* the model specification are supplied as arrays, + try casting them to the type before validating. + + Columns that are not in the spec are handled separately in + :meth:`.cast_extra_columns` """ try: return handler(val) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py index 50eeb23..23f75ee 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -137,7 +137,7 @@ class VectorIndexMixin(BaseModel): if self.target is None: return self.value[item] else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self._getitem_helper(item) elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -188,7 +188,7 @@ class DynamicTableRegionMixin(BaseModel): this being a subclass of ``VectorData`` """ if self._index: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): # index returns an array of indices, # and indexing table with an array returns a list of rows return self.table[self._index[item]] @@ -200,7 +200,7 @@ class DynamicTableRegionMixin(BaseModel): else: raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -406,6 +406,32 @@ class DynamicTableMixin(BaseModel): model["colnames"].extend(colnames) return model + @model_validator(mode="after") + def cast_extra_columns(self): + """ + If extra columns are passed as just lists or arrays, cast to VectorData + before we resolve targets for VectorData and VectorIndex pairs. 
+ + See :meth:`.cast_specified_columns` for handling columns in the class specification + """ + # if columns are not in the specification, cast to a generic VectorData + for key, val in self.__pydantic_extra__.items(): + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + self.__pydantic_extra__[key] = VectorIndex( + name=key, description="", value=val + ) + else: + self.__pydantic_extra__[key] = VectorData( + name=key, description="", value=val + ) + except ValidationError as e: + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e + return self + @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": """ @@ -442,11 +468,15 @@ class DynamicTableMixin(BaseModel): @field_validator("*", mode="wrap") @classmethod - def cast_columns( + def cast_specified_columns( cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo ) -> Any: """ - If columns are supplied as arrays, try casting them to the type before validating + If columns *in* the model specification are supplied as arrays, + try casting them to the type before validating. + + Columns that are not in the spec are handled separately in + :meth:`.cast_extra_columns` """ try: return handler(val) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index affd5dc..e5d4abc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -137,7 +137,7 @@ class VectorIndexMixin(BaseModel): if self.target is None: return self.value[item] else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self._getitem_helper(item) elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -188,7 +188,7 @@ class DynamicTableRegionMixin(BaseModel): this being a subclass of ``VectorData`` """ if self._index: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): # index returns an array of indices, # and indexing table with an array returns a list of rows return self.table[self._index[item]] @@ -200,7 +200,7 @@ class DynamicTableRegionMixin(BaseModel): else: raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -406,6 +406,32 @@ class DynamicTableMixin(BaseModel): model["colnames"].extend(colnames) return model + @model_validator(mode="after") + def cast_extra_columns(self): + """ + If extra columns are passed as just lists or arrays, cast to VectorData + before we resolve targets for VectorData and VectorIndex pairs. 
+ + See :meth:`.cast_specified_columns` for handling columns in the class specification + """ + # if columns are not in the specification, cast to a generic VectorData + for key, val in self.__pydantic_extra__.items(): + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + self.__pydantic_extra__[key] = VectorIndex( + name=key, description="", value=val + ) + else: + self.__pydantic_extra__[key] = VectorData( + name=key, description="", value=val + ) + except ValidationError as e: + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e + return self + @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": """ @@ -442,11 +468,15 @@ class DynamicTableMixin(BaseModel): @field_validator("*", mode="wrap") @classmethod - def cast_columns( + def cast_specified_columns( cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo ) -> Any: """ - If columns are supplied as arrays, try casting them to the type before validating + If columns *in* the model specification are supplied as arrays, + try casting them to the type before validating. + + Columns that are not in the spec are handled separately in + :meth:`.cast_extra_columns` """ try: return handler(val) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 5b99f2c..46796a1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -137,7 +137,7 @@ class VectorIndexMixin(BaseModel): if self.target is None: return self.value[item] else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self._getitem_helper(item) elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -188,7 +188,7 @@ class DynamicTableRegionMixin(BaseModel): this being a subclass of ``VectorData`` """ if self._index: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): # index returns an array of indices, # and indexing table with an array returns a list of rows return self.table[self._index[item]] @@ -200,7 +200,7 @@ class DynamicTableRegionMixin(BaseModel): else: raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -406,6 +406,32 @@ class DynamicTableMixin(BaseModel): model["colnames"].extend(colnames) return model + @model_validator(mode="after") + def cast_extra_columns(self): + """ + If extra columns are passed as just lists or arrays, cast to VectorData + before we resolve targets for VectorData and VectorIndex pairs. 
+ + See :meth:`.cast_specified_columns` for handling columns in the class specification + """ + # if columns are not in the specification, cast to a generic VectorData + for key, val in self.__pydantic_extra__.items(): + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + self.__pydantic_extra__[key] = VectorIndex( + name=key, description="", value=val + ) + else: + self.__pydantic_extra__[key] = VectorData( + name=key, description="", value=val + ) + except ValidationError as e: + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e + return self + @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": """ @@ -442,11 +468,15 @@ class DynamicTableMixin(BaseModel): @field_validator("*", mode="wrap") @classmethod - def cast_columns( + def cast_specified_columns( cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo ) -> Any: """ - If columns are supplied as arrays, try casting them to the type before validating + If columns *in* the model specification are supplied as arrays, + try casting them to the type before validating. + + Columns that are not in the spec are handled separately in + :meth:`.cast_extra_columns` """ try: return handler(val) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 2eb4675..9880ee8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -137,7 +137,7 @@ class VectorIndexMixin(BaseModel): if self.target is None: return self.value[item] else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self._getitem_helper(item) elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -188,7 +188,7 @@ class DynamicTableRegionMixin(BaseModel): this being a subclass of ``VectorData`` """ if self._index: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): # index returns an array of indices, # and indexing table with an array returns a list of rows return self.table[self._index[item]] @@ -200,7 +200,7 @@ class DynamicTableRegionMixin(BaseModel): else: raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -406,6 +406,32 @@ class DynamicTableMixin(BaseModel): model["colnames"].extend(colnames) return model + @model_validator(mode="after") + def cast_extra_columns(self): + """ + If extra columns are passed as just lists or arrays, cast to VectorData + before we resolve targets for VectorData and VectorIndex pairs. 
+ + See :meth:`.cast_specified_columns` for handling columns in the class specification + """ + # if columns are not in the specification, cast to a generic VectorData + for key, val in self.__pydantic_extra__.items(): + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + self.__pydantic_extra__[key] = VectorIndex( + name=key, description="", value=val + ) + else: + self.__pydantic_extra__[key] = VectorData( + name=key, description="", value=val + ) + except ValidationError as e: + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e + return self + @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": """ @@ -442,11 +468,15 @@ class DynamicTableMixin(BaseModel): @field_validator("*", mode="wrap") @classmethod - def cast_columns( + def cast_specified_columns( cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo ) -> Any: """ - If columns are supplied as arrays, try casting them to the type before validating + If columns *in* the model specification are supplied as arrays, + try casting them to the type before validating. + + Columns that are not in the spec are handled separately in + :meth:`.cast_extra_columns` """ try: return handler(val) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index d578633..158f8c1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -137,7 +137,7 @@ class VectorIndexMixin(BaseModel): if self.target is None: return self.value[item] else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self._getitem_helper(item) elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -188,7 +188,7 @@ class DynamicTableRegionMixin(BaseModel): this being a subclass of ``VectorData`` """ if self._index: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): # index returns an array of indices, # and indexing table with an array returns a list of rows return self.table[self._index[item]] @@ -200,7 +200,7 @@ class DynamicTableRegionMixin(BaseModel): else: raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -406,6 +406,32 @@ class DynamicTableMixin(BaseModel): model["colnames"].extend(colnames) return model + @model_validator(mode="after") + def cast_extra_columns(self): + """ + If extra columns are passed as just lists or arrays, cast to VectorData + before we resolve targets for VectorData and VectorIndex pairs. 
+ + See :meth:`.cast_specified_columns` for handling columns in the class specification + """ + # if columns are not in the specification, cast to a generic VectorData + for key, val in self.__pydantic_extra__.items(): + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + self.__pydantic_extra__[key] = VectorIndex( + name=key, description="", value=val + ) + else: + self.__pydantic_extra__[key] = VectorData( + name=key, description="", value=val + ) + except ValidationError as e: + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e + return self + @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": """ @@ -442,11 +468,15 @@ class DynamicTableMixin(BaseModel): @field_validator("*", mode="wrap") @classmethod - def cast_columns( + def cast_specified_columns( cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo ) -> Any: """ - If columns are supplied as arrays, try casting them to the type before validating + If columns *in* the model specification are supplied as arrays, + try casting them to the type before validating. + + Columns that are not in the spec are handled separately in + :meth:`.cast_extra_columns` """ try: return handler(val) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index 1d6e89f..3ffb25d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -137,7 +137,7 @@ class VectorIndexMixin(BaseModel): if self.target is None: return self.value[item] else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self._getitem_helper(item) elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -188,7 +188,7 @@ class DynamicTableRegionMixin(BaseModel): this being a subclass of ``VectorData`` """ if self._index: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): # index returns an array of indices, # and indexing table with an array returns a list of rows return self.table[self._index[item]] @@ -200,7 +200,7 @@ class DynamicTableRegionMixin(BaseModel): else: raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -406,6 +406,32 @@ class DynamicTableMixin(BaseModel): model["colnames"].extend(colnames) return model + @model_validator(mode="after") + def cast_extra_columns(self): + """ + If extra columns are passed as just lists or arrays, cast to VectorData + before we resolve targets for VectorData and VectorIndex pairs. 
+ + See :meth:`.cast_specified_columns` for handling columns in the class specification + """ + # if columns are not in the specification, cast to a generic VectorData + for key, val in self.__pydantic_extra__.items(): + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + self.__pydantic_extra__[key] = VectorIndex( + name=key, description="", value=val + ) + else: + self.__pydantic_extra__[key] = VectorData( + name=key, description="", value=val + ) + except ValidationError as e: + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e + return self + @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": """ @@ -442,11 +468,15 @@ class DynamicTableMixin(BaseModel): @field_validator("*", mode="wrap") @classmethod - def cast_columns( + def cast_specified_columns( cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo ) -> Any: """ - If columns are supplied as arrays, try casting them to the type before validating + If columns *in* the model specification are supplied as arrays, + try casting them to the type before validating. + + Columns that are not in the spec are handled separately in + :meth:`.cast_extra_columns` """ try: return handler(val) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index 2c1798b..de43571 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -137,7 +137,7 @@ class VectorIndexMixin(BaseModel): if self.target is None: return self.value[item] else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self._getitem_helper(item) elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -188,7 +188,7 @@ class DynamicTableRegionMixin(BaseModel): this being a subclass of ``VectorData`` """ if self._index: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): # index returns an array of indices, # and indexing table with an array returns a list of rows return self.table[self._index[item]] @@ -200,7 +200,7 @@ class DynamicTableRegionMixin(BaseModel): else: raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: - if isinstance(item, int): + if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): @@ -406,6 +406,32 @@ class DynamicTableMixin(BaseModel): model["colnames"].extend(colnames) return model + @model_validator(mode="after") + def cast_extra_columns(self): + """ + If extra columns are passed as just lists or arrays, cast to VectorData + before we resolve targets for VectorData and VectorIndex pairs. 
+
+        See :meth:`.cast_specified_columns` for handling columns in the class specification
+        """
+        # if columns are not in the specification, cast to a generic VectorData
+        for key, val in self.__pydantic_extra__.items():
+            if not isinstance(val, (VectorData, VectorIndex)):
+                try:
+                    if key.endswith("_index"):
+                        self.__pydantic_extra__[key] = VectorIndex(
+                            name=key, description="", value=val
+                        )
+                    else:
+                        self.__pydantic_extra__[key] = VectorData(
+                            name=key, description="", value=val
+                        )
+                except ValidationError as e:
+                    raise ValidationError(
+                        f"field {key} cannot be cast to VectorData from {val}"
+                    ) from e
+        return self
+
     @model_validator(mode="after")
     def resolve_targets(self) -> "DynamicTableMixin":
         """
@@ -442,11 +468,15 @@ class DynamicTableMixin(BaseModel):

     @field_validator("*", mode="wrap")
     @classmethod
-    def cast_columns(
+    def cast_specified_columns(
         cls, val: Any, handler: ValidatorFunctionWrapHandler, info: ValidationInfo
     ) -> Any:
         """
-        If columns are supplied as arrays, try casting them to the type before validating
+        If columns *in* the model specification are supplied as arrays,
+        try casting them to the type before validating.
+
+        Columns that are not in the spec are handled separately in
+        :meth:`.cast_extra_columns`
         """
         try:
             return handler(val)
diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py
index 2d07d2d..e00c02e 100644
--- a/nwb_linkml/tests/test_includes/test_hdmf.py
+++ b/nwb_linkml/tests/test_includes/test_hdmf.py
@@ -211,7 +211,7 @@ def test_dynamictable_region_ragged():
         name="table",
         description="a table what else would it be",
         id=np.arange(len(spike_idx)),
-        timeseries=spike_times,
+        timeseries=spike_times_flat,
         timeseries_index=spike_idx,
     )
     region = DynamicTableRegion(
@@ -242,3 +242,10 @@ def test_dynamictable_append_column():

 def test_dynamictable_append_row():
     pass
+
+
+def test_dynamictable_extra_coercion():
+    """
+    Extra fields should be coerced to VectorData and have their
+    indexing relationships handled when passed as plain arrays.
+    """
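`test_dynamictable_extra_coercion` is left as a stub here; a minimal sketch of the behavior it would exercise, assuming the generated v1_8_0 models (the `spike_times` column names are illustrative, not from the spec):

```python
# A sketch (not the project's actual test) of the coercion the stub above
# describes, assuming the generated v1_8_0 models; names are illustrative.
import numpy as np

from nwb_linkml.models.pydantic.hdmf_common.v1_8_0.hdmf_common_table import (
    DynamicTable,
    VectorData,
    VectorIndex,
)

spike_times_flat = np.array([0.1, 0.2, 0.3, 0.4, 0.5])
spike_idx = np.array([2, 5])  # VectorIndex-style end index of each cell

table = DynamicTable(
    name="units",
    description="a table",
    id=np.arange(len(spike_idx)),
    spike_times=spike_times_flat,  # plain array, not a VectorData
    spike_times_index=spike_idx,   # plain array, not a VectorIndex
)

# cast_extra_columns should wrap the plain arrays...
assert isinstance(table.spike_times, VectorData)
assert isinstance(table.spike_times_index, VectorIndex)
# ...and resolve_targets should then pair them up (assuming it sets .target)
assert table.spike_times_index.target is table.spike_times
```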
From 54c18e333e4e7947a932c7f56d7bca4b557e1f09 Mon Sep 17 00:00:00 2001
From: sneakers-the-rat
Date: Thu, 8 Aug 2024 00:31:41 -0700
Subject: [PATCH 34/61] initial draft of aligned dynamic table before testing and refining.
switch pandas import to module-level
---
 docs/intro/translation.md                     |  4 +
 .../src/nwb_linkml/generators/pydantic.py     |  2 +
 nwb_linkml/src/nwb_linkml/includes/hdmf.py    | 86 +++++++++++++++--
 .../hdmf_common/v1_1_0/hdmf_common_table.py   | 81 +++++++++++++++--
 .../hdmf_common/v1_1_2/hdmf_common_table.py   | 81 +++++++++++++++--
 .../hdmf_common/v1_1_3/hdmf_common_table.py   | 81 +++++++++++++++--
 .../hdmf_common/v1_2_0/hdmf_common_table.py   | 81 +++++++++++++++--
 .../hdmf_common/v1_2_1/hdmf_common_table.py   | 81 +++++++++++++++--
 .../hdmf_common/v1_3_0/hdmf_common_table.py   | 81 +++++++++++++++--
 .../hdmf_common/v1_4_0/hdmf_common_table.py   | 81 +++++++++++++++--
 .../hdmf_common/v1_5_0/hdmf_common_table.py   | 83 ++++++++++++++++--
 .../hdmf_common/v1_5_1/hdmf_common_table.py   | 83 ++++++++++++++++--
 .../hdmf_common/v1_6_0/hdmf_common_table.py   | 83 ++++++++++++++++--
 .../hdmf_common/v1_7_0/hdmf_common_table.py   | 83 ++++++++++++++++--
 .../hdmf_common/v1_8_0/hdmf_common_table.py   | 83 ++++++++++++++++--
 15 files changed, 988 insertions(+), 86 deletions(-)

diff --git a/docs/intro/translation.md b/docs/intro/translation.md
index 613b93f..899dfbe 100644
--- a/docs/intro/translation.md
+++ b/docs/intro/translation.md
@@ -305,6 +305,10 @@ There are several different ways to create references between objects in nwb/hdm
   target_type: ElectrodeGroup
   reftype: object
   ```
+- `TimeSeriesReferenceVectorData` is a compound dtype that behaves like a VectorData and VectorIndex combined
+  into a single type. It differs in that each row of the vector can refer to a different `TimeSeries`,
+  and selection is handled differently (each row stores a `start` index and a `count`
+  rather than a series of indices marking the end of each cell)
 - Implicitly, hdmf creates references between objects according to some naming conventions, eg.
   an attribute/dataset that is a `VectorIndex` named `mydata_index` will be linked to a `VectorData`
   object `mydata`.
diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py
index e1f07af..109ce5b 100644
--- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py
+++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py
@@ -252,6 +252,8 @@ class AfterGenerateClass:
             cls.cls.bases = ["VectorIndexMixin"]
         elif cls.cls.name == "DynamicTableRegion":
             cls.cls.bases = ["DynamicTableRegionMixin", "VectorData"]
+        elif cls.cls.name == "AlignedDynamicTable":
+            cls.cls.bases = ["AlignedDynamicTableMixin", "DynamicTable"]

         return cls

diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py
index c34bf9a..addc32c 100644
--- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py
+++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py
@@ -18,7 +18,7 @@ from typing import (
 import numpy as np
 from linkml.generators.pydanticgen.template import Import, Imports, ObjectImport
 from numpydantic import NDArray, Shape
-from pandas import DataFrame, Series
+import pandas as pd
 from pydantic import (
     BaseModel,
     ConfigDict,
@@ -66,21 +66,21 @@ class DynamicTableMixin(BaseModel):
     def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ...

     @overload
-    def __getitem__(self, item: int) -> DataFrame: ...
+    def __getitem__(self, item: int) -> pd.DataFrame: ...

     @overload
     def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ...

     @overload
     def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[
-        DataFrame,
+        pd.DataFrame,
         list,
         "NDArray",
         "VectorDataMixin",
     ]: ...
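Since the overloads now point at `pd.DataFrame`, the intended contract is easiest to see in miniature; a sketch under the assumption of the generated v1_8_0 models (the table and column names here are made up):

```python
# A sketch of what the overloads above promise for each index type,
# assuming the generated v1_8_0 models; names are illustrative.
import numpy as np
import pandas as pd

from nwb_linkml.models.pydantic.hdmf_common.v1_8_0.hdmf_common_table import DynamicTable

table = DynamicTable(
    name="electrodes",
    description="an example table",
    id=np.arange(3),
    x=np.array([1.0, 2.0, 3.0]),  # extra column, coerced to VectorData
)

assert isinstance(table[0], pd.DataFrame)    # int -> one-row DataFrame
assert isinstance(table[0:2], pd.DataFrame)  # slice -> DataFrame
column = table["x"]                          # str -> the column object itself
cell = table[0, "x"]                         # (row, column) -> a single value
```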
@overload - def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ... def __getitem__( self, @@ -131,7 +131,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return DataFrame(data) + return pd.DataFrame(data) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -149,7 +149,7 @@ class DynamicTableMixin(BaseModel): # scalars need to be wrapped in series for pandas if not isinstance(rows, (Iterable, slice)): - val = Series([val]) + val = pd.Series([val]) data[k] = val return data @@ -180,6 +180,14 @@ class DynamicTableMixin(BaseModel): except AttributeError: raise e from None + def __len__(self) -> int: + """ + Use the id column to determine length. + + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + @model_validator(mode="before") @classmethod def create_id(cls, model: Dict[str, Any]) -> Dict: @@ -467,11 +475,70 @@ class DynamicTableRegionMixin(BaseModel): self.table[self.value[key]] = value +class AlignedDynamicTableMixin(DynamicTableMixin): + """ + Mixin to allow indexing multiple tables that are aligned on a common ID + """ + + __pydantic_extra__: Dict[str, "DynamicTableMixin"] + + NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( + "name", + "categories", + "colnames", + "description", + ) + + name: str = "aligned_table" + categories: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None + + @property + def _categories(self) -> Dict[str, "DynamicTableMixin"]: + return {k: getattr(self, k) for i, k in enumerate(self.categories)} + + def __getitem__( + self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + ) -> pd.DataFrame: + """ + Mimic hdmf: + + https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261 + Args: + item: + + Returns: + + """ + if isinstance(item, str): + # get a single table + return self._categories[item][:] + elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): + # get a slice of a single table + return self._categories[item[1]][item[0]] + elif isinstance(item, (int, slice)): + # get a slice of all the tables + ids = self.id[item] + if not isinstance(ids, Iterable): + ids = pd.Series([ids]) + ids = pd.DataFrame({"id": ids}) + tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + names = [self.name] + self.categories + # construct below in case we need to support array indexing in the future + else: + raise ValueError( + f"Dont know how to index with {item}, " + "need an int, string, slice, or tuple[int | slice, str]" + ) + + df = pd.concat(tables, axis=1, keys=names) + df.set_index((self.name, "id"), drop=True, inplace=True) + return df + + DYNAMIC_TABLE_IMPORTS = Imports( imports=[ - Import( - module="pandas", objects=[ObjectImport(name="DataFrame"), ObjectImport(name="Series")] - ), + Import(module="pandas", alias="pd"), Import( module="typing", objects=[ @@ -508,4 +575,5 @@ DYNAMIC_TABLE_INJECTS = [ VectorIndexMixin, DynamicTableRegionMixin, DynamicTableMixin, + AlignedDynamicTableMixin, ] diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index 1c07ed3..0c982dd 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ 
b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -4,7 +4,7 @@ from decimal import Decimal from enum import Enum import re import sys -from pandas import DataFrame, Series +import pandas as pd from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from numpydantic import NDArray, Shape from pydantic import ( @@ -244,21 +244,21 @@ class DynamicTableMixin(BaseModel): def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload - def __getitem__(self, item: int) -> DataFrame: ... + def __getitem__(self, item: int) -> pd.DataFrame: ... @overload def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... @overload def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, + pd.DataFrame, list, "NDArray", "VectorDataMixin", ]: ... @overload - def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ... def __getitem__( self, @@ -309,7 +309,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return DataFrame(data) + return pd.DataFrame(data) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -327,7 +327,7 @@ class DynamicTableMixin(BaseModel): # scalars need to be wrapped in series for pandas if not isinstance(rows, (Iterable, slice)): - val = Series([val]) + val = pd.Series([val]) data[k] = val return data @@ -358,6 +358,14 @@ class DynamicTableMixin(BaseModel): except AttributeError: raise e from None + def __len__(self) -> int: + """ + Use the id column to determine length. + + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + @model_validator(mode="before") @classmethod def create_id(cls, model: Dict[str, Any]) -> Dict: @@ -492,6 +500,67 @@ class DynamicTableMixin(BaseModel): ) +class AlignedDynamicTableMixin(DynamicTableMixin): + """ + Mixin to allow indexing multiple tables that are aligned on a common ID + """ + + __pydantic_extra__: Dict[str, "DynamicTableMixin"] + + NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( + "name", + "categories", + "colnames", + "description", + ) + + name: str = "aligned_table" + categories: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None + + @property + def _categories(self) -> Dict[str, "DynamicTableMixin"]: + return {k: getattr(self, k) for i, k in enumerate(self.categories)} + + def __getitem__( + self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + ) -> pd.DataFrame: + """ + Mimic hdmf: + + https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261 + Args: + item: + + Returns: + + """ + if isinstance(item, str): + # get a single table + return self._categories[item][:] + elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): + # get a slice of a single table + return self._categories[item[1]][item[0]] + elif isinstance(item, (int, slice)): + # get a slice of all the tables + ids = self.id[item] + if not isinstance(ids, Iterable): + ids = pd.Series([ids]) + ids = pd.DataFrame({"id": ids}) + tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + names = [self.name] + self.categories + # construct below in case we need to support array indexing in the future + else: + raise ValueError( + f"Dont know how to index with {item}, " + "need 
an int, string, slice, or tuple[int | slice, str]" + ) + + df = pd.concat(tables, axis=1, keys=names) + df.set_index((self.name, "id"), drop=True, inplace=True) + return df + + linkml_meta = LinkMLMeta( { "annotations": { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 36a79bb..9a2696f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -4,7 +4,7 @@ from decimal import Decimal from enum import Enum import re import sys -from pandas import DataFrame, Series +import pandas as pd from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from numpydantic import NDArray, Shape from pydantic import ( @@ -244,21 +244,21 @@ class DynamicTableMixin(BaseModel): def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload - def __getitem__(self, item: int) -> DataFrame: ... + def __getitem__(self, item: int) -> pd.DataFrame: ... @overload def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... @overload def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, + pd.DataFrame, list, "NDArray", "VectorDataMixin", ]: ... @overload - def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ... def __getitem__( self, @@ -309,7 +309,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return DataFrame(data) + return pd.DataFrame(data) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -327,7 +327,7 @@ class DynamicTableMixin(BaseModel): # scalars need to be wrapped in series for pandas if not isinstance(rows, (Iterable, slice)): - val = Series([val]) + val = pd.Series([val]) data[k] = val return data @@ -358,6 +358,14 @@ class DynamicTableMixin(BaseModel): except AttributeError: raise e from None + def __len__(self) -> int: + """ + Use the id column to determine length. 
+ + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + @model_validator(mode="before") @classmethod def create_id(cls, model: Dict[str, Any]) -> Dict: @@ -492,6 +500,67 @@ class DynamicTableMixin(BaseModel): ) +class AlignedDynamicTableMixin(DynamicTableMixin): + """ + Mixin to allow indexing multiple tables that are aligned on a common ID + """ + + __pydantic_extra__: Dict[str, "DynamicTableMixin"] + + NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( + "name", + "categories", + "colnames", + "description", + ) + + name: str = "aligned_table" + categories: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None + + @property + def _categories(self) -> Dict[str, "DynamicTableMixin"]: + return {k: getattr(self, k) for i, k in enumerate(self.categories)} + + def __getitem__( + self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + ) -> pd.DataFrame: + """ + Mimic hdmf: + + https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261 + Args: + item: + + Returns: + + """ + if isinstance(item, str): + # get a single table + return self._categories[item][:] + elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): + # get a slice of a single table + return self._categories[item[1]][item[0]] + elif isinstance(item, (int, slice)): + # get a slice of all the tables + ids = self.id[item] + if not isinstance(ids, Iterable): + ids = pd.Series([ids]) + ids = pd.DataFrame({"id": ids}) + tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + names = [self.name] + self.categories + # construct below in case we need to support array indexing in the future + else: + raise ValueError( + f"Dont know how to index with {item}, " + "need an int, string, slice, or tuple[int | slice, str]" + ) + + df = pd.concat(tables, axis=1, keys=names) + df.set_index((self.name, "id"), drop=True, inplace=True) + return df + + linkml_meta = LinkMLMeta( { "annotations": { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index a5477d8..35fe280 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -4,7 +4,7 @@ from decimal import Decimal from enum import Enum import re import sys -from pandas import DataFrame, Series +import pandas as pd from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from numpydantic import NDArray, Shape from pydantic import ( @@ -244,21 +244,21 @@ class DynamicTableMixin(BaseModel): def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload - def __getitem__(self, item: int) -> DataFrame: ... + def __getitem__(self, item: int) -> pd.DataFrame: ... @overload def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... @overload def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, + pd.DataFrame, list, "NDArray", "VectorDataMixin", ]: ... @overload - def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ... 
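The `AlignedDynamicTableMixin.__getitem__` being copied into each version module dispatches on the index type: a category name returns one sub-table, a `(rows, category)` tuple slices one sub-table, and an `int`/`slice` stitches every category into a single DataFrame. A sketch of the intended behavior, assuming the generated v1_7_0 models and a made-up `ecephys` category (the commit message calls this an initial draft, so treat the details as illustrative):

```python
# Intended indexing semantics of AlignedDynamicTableMixin, assuming the
# generated v1_7_0 models; "ecephys" and its columns are made up here.
import numpy as np

from nwb_linkml.models.pydantic.hdmf_common.v1_7_0.hdmf_common_table import (
    AlignedDynamicTable,
    DynamicTable,
)

ecephys = DynamicTable(
    name="ecephys",
    description="a category sub-table",
    id=np.arange(3),
    depth=np.array([10.0, 20.0, 30.0]),
)

aligned = AlignedDynamicTable(
    name="aligned_table",
    description="an aligned table",
    id=np.arange(3),
    categories=["ecephys"],
    ecephys=ecephys,
)

aligned["ecephys"]       # str -> the whole category as a DataFrame
aligned[0:2, "ecephys"]  # (rows, category) -> a slice of that category
aligned[0:2]             # int/slice -> all categories, MultiIndex columns
```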
def __getitem__( self, @@ -309,7 +309,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return DataFrame(data) + return pd.DataFrame(data) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -327,7 +327,7 @@ class DynamicTableMixin(BaseModel): # scalars need to be wrapped in series for pandas if not isinstance(rows, (Iterable, slice)): - val = Series([val]) + val = pd.Series([val]) data[k] = val return data @@ -358,6 +358,14 @@ class DynamicTableMixin(BaseModel): except AttributeError: raise e from None + def __len__(self) -> int: + """ + Use the id column to determine length. + + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + @model_validator(mode="before") @classmethod def create_id(cls, model: Dict[str, Any]) -> Dict: @@ -492,6 +500,67 @@ class DynamicTableMixin(BaseModel): ) +class AlignedDynamicTableMixin(DynamicTableMixin): + """ + Mixin to allow indexing multiple tables that are aligned on a common ID + """ + + __pydantic_extra__: Dict[str, "DynamicTableMixin"] + + NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( + "name", + "categories", + "colnames", + "description", + ) + + name: str = "aligned_table" + categories: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None + + @property + def _categories(self) -> Dict[str, "DynamicTableMixin"]: + return {k: getattr(self, k) for i, k in enumerate(self.categories)} + + def __getitem__( + self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + ) -> pd.DataFrame: + """ + Mimic hdmf: + + https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261 + Args: + item: + + Returns: + + """ + if isinstance(item, str): + # get a single table + return self._categories[item][:] + elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): + # get a slice of a single table + return self._categories[item[1]][item[0]] + elif isinstance(item, (int, slice)): + # get a slice of all the tables + ids = self.id[item] + if not isinstance(ids, Iterable): + ids = pd.Series([ids]) + ids = pd.DataFrame({"id": ids}) + tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + names = [self.name] + self.categories + # construct below in case we need to support array indexing in the future + else: + raise ValueError( + f"Dont know how to index with {item}, " + "need an int, string, slice, or tuple[int | slice, str]" + ) + + df = pd.concat(tables, axis=1, keys=names) + df.set_index((self.name, "id"), drop=True, inplace=True) + return df + + linkml_meta = LinkMLMeta( { "annotations": { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py index bd03453..d12fd85 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys from ...hdmf_common.v1_2_0.hdmf_common_base import Data, Container -from pandas import DataFrame, Series +import pandas as pd from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from numpydantic import NDArray, Shape from pydantic import ( @@ -245,21 +245,21 @@ class DynamicTableMixin(BaseModel): def __getitem__(self, item: 
str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload - def __getitem__(self, item: int) -> DataFrame: ... + def __getitem__(self, item: int) -> pd.DataFrame: ... @overload def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... @overload def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, + pd.DataFrame, list, "NDArray", "VectorDataMixin", ]: ... @overload - def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ... def __getitem__( self, @@ -310,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return DataFrame(data) + return pd.DataFrame(data) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -328,7 +328,7 @@ class DynamicTableMixin(BaseModel): # scalars need to be wrapped in series for pandas if not isinstance(rows, (Iterable, slice)): - val = Series([val]) + val = pd.Series([val]) data[k] = val return data @@ -359,6 +359,14 @@ class DynamicTableMixin(BaseModel): except AttributeError: raise e from None + def __len__(self) -> int: + """ + Use the id column to determine length. + + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + @model_validator(mode="before") @classmethod def create_id(cls, model: Dict[str, Any]) -> Dict: @@ -493,6 +501,67 @@ class DynamicTableMixin(BaseModel): ) +class AlignedDynamicTableMixin(DynamicTableMixin): + """ + Mixin to allow indexing multiple tables that are aligned on a common ID + """ + + __pydantic_extra__: Dict[str, "DynamicTableMixin"] + + NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( + "name", + "categories", + "colnames", + "description", + ) + + name: str = "aligned_table" + categories: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None + + @property + def _categories(self) -> Dict[str, "DynamicTableMixin"]: + return {k: getattr(self, k) for i, k in enumerate(self.categories)} + + def __getitem__( + self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + ) -> pd.DataFrame: + """ + Mimic hdmf: + + https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261 + Args: + item: + + Returns: + + """ + if isinstance(item, str): + # get a single table + return self._categories[item][:] + elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): + # get a slice of a single table + return self._categories[item[1]][item[0]] + elif isinstance(item, (int, slice)): + # get a slice of all the tables + ids = self.id[item] + if not isinstance(ids, Iterable): + ids = pd.Series([ids]) + ids = pd.DataFrame({"id": ids}) + tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + names = [self.name] + self.categories + # construct below in case we need to support array indexing in the future + else: + raise ValueError( + f"Dont know how to index with {item}, " + "need an int, string, slice, or tuple[int | slice, str]" + ) + + df = pd.concat(tables, axis=1, keys=names) + df.set_index((self.name, "id"), drop=True, inplace=True) + return df + + linkml_meta = LinkMLMeta( { "annotations": { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py index 82c84bf..1bfb82c 100644 --- 
a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys from ...hdmf_common.v1_2_1.hdmf_common_base import Data, Container -from pandas import DataFrame, Series +import pandas as pd from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from numpydantic import NDArray, Shape from pydantic import ( @@ -245,21 +245,21 @@ class DynamicTableMixin(BaseModel): def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload - def __getitem__(self, item: int) -> DataFrame: ... + def __getitem__(self, item: int) -> pd.DataFrame: ... @overload def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... @overload def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, + pd.DataFrame, list, "NDArray", "VectorDataMixin", ]: ... @overload - def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ... def __getitem__( self, @@ -310,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return DataFrame(data) + return pd.DataFrame(data) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -328,7 +328,7 @@ class DynamicTableMixin(BaseModel): # scalars need to be wrapped in series for pandas if not isinstance(rows, (Iterable, slice)): - val = Series([val]) + val = pd.Series([val]) data[k] = val return data @@ -359,6 +359,14 @@ class DynamicTableMixin(BaseModel): except AttributeError: raise e from None + def __len__(self) -> int: + """ + Use the id column to determine length. 
+ + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + @model_validator(mode="before") @classmethod def create_id(cls, model: Dict[str, Any]) -> Dict: @@ -493,6 +501,67 @@ class DynamicTableMixin(BaseModel): ) +class AlignedDynamicTableMixin(DynamicTableMixin): + """ + Mixin to allow indexing multiple tables that are aligned on a common ID + """ + + __pydantic_extra__: Dict[str, "DynamicTableMixin"] + + NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( + "name", + "categories", + "colnames", + "description", + ) + + name: str = "aligned_table" + categories: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None + + @property + def _categories(self) -> Dict[str, "DynamicTableMixin"]: + return {k: getattr(self, k) for i, k in enumerate(self.categories)} + + def __getitem__( + self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + ) -> pd.DataFrame: + """ + Mimic hdmf: + + https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261 + Args: + item: + + Returns: + + """ + if isinstance(item, str): + # get a single table + return self._categories[item][:] + elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): + # get a slice of a single table + return self._categories[item[1]][item[0]] + elif isinstance(item, (int, slice)): + # get a slice of all the tables + ids = self.id[item] + if not isinstance(ids, Iterable): + ids = pd.Series([ids]) + ids = pd.DataFrame({"id": ids}) + tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + names = [self.name] + self.categories + # construct below in case we need to support array indexing in the future + else: + raise ValueError( + f"Dont know how to index with {item}, " + "need an int, string, slice, or tuple[int | slice, str]" + ) + + df = pd.concat(tables, axis=1, keys=names) + df.set_index((self.name, "id"), drop=True, inplace=True) + return df + + linkml_meta = LinkMLMeta( { "annotations": { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py index 23f75ee..b19b63a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys from ...hdmf_common.v1_3_0.hdmf_common_base import Data, Container -from pandas import DataFrame, Series +import pandas as pd from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from numpydantic import NDArray, Shape from pydantic import ( @@ -245,21 +245,21 @@ class DynamicTableMixin(BaseModel): def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload - def __getitem__(self, item: int) -> DataFrame: ... + def __getitem__(self, item: int) -> pd.DataFrame: ... @overload def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... @overload def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, + pd.DataFrame, list, "NDArray", "VectorDataMixin", ]: ... @overload - def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ... 
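The DataFrame assembly at the end of `__getitem__` relies on `pd.concat(..., keys=...)` building a column MultiIndex and then promoting the main table's `id` column to the row index. That pattern is standard pandas and can be demonstrated standalone (the category names below are made up):

```python
# Standalone demonstration of the concat-with-keys pattern used in
# AlignedDynamicTableMixin.__getitem__ above: each category DataFrame gets
# its name as the top level of a column MultiIndex, and the id column of
# the main table becomes the row index.
import pandas as pd

ids = pd.DataFrame({"id": [0, 1, 2]})
electrodes = pd.DataFrame({"x": [1.0, 2.0, 3.0]})
waveforms = pd.DataFrame({"mean": [0.1, 0.2, 0.3]})

df = pd.concat(
    [ids, electrodes, waveforms],
    axis=1,
    keys=["aligned_table", "electrodes", "waveforms"],
)
df.set_index(("aligned_table", "id"), drop=True, inplace=True)

# Columns are now addressed as (category, column) pairs:
assert df[("electrodes", "x")].tolist() == [1.0, 2.0, 3.0]
```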
def __getitem__( self, @@ -310,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return DataFrame(data) + return pd.DataFrame(data) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -328,7 +328,7 @@ class DynamicTableMixin(BaseModel): # scalars need to be wrapped in series for pandas if not isinstance(rows, (Iterable, slice)): - val = Series([val]) + val = pd.Series([val]) data[k] = val return data @@ -359,6 +359,14 @@ class DynamicTableMixin(BaseModel): except AttributeError: raise e from None + def __len__(self) -> int: + """ + Use the id column to determine length. + + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + @model_validator(mode="before") @classmethod def create_id(cls, model: Dict[str, Any]) -> Dict: @@ -493,6 +501,67 @@ class DynamicTableMixin(BaseModel): ) +class AlignedDynamicTableMixin(DynamicTableMixin): + """ + Mixin to allow indexing multiple tables that are aligned on a common ID + """ + + __pydantic_extra__: Dict[str, "DynamicTableMixin"] + + NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( + "name", + "categories", + "colnames", + "description", + ) + + name: str = "aligned_table" + categories: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None + + @property + def _categories(self) -> Dict[str, "DynamicTableMixin"]: + return {k: getattr(self, k) for i, k in enumerate(self.categories)} + + def __getitem__( + self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + ) -> pd.DataFrame: + """ + Mimic hdmf: + + https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261 + Args: + item: + + Returns: + + """ + if isinstance(item, str): + # get a single table + return self._categories[item][:] + elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): + # get a slice of a single table + return self._categories[item[1]][item[0]] + elif isinstance(item, (int, slice)): + # get a slice of all the tables + ids = self.id[item] + if not isinstance(ids, Iterable): + ids = pd.Series([ids]) + ids = pd.DataFrame({"id": ids}) + tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + names = [self.name] + self.categories + # construct below in case we need to support array indexing in the future + else: + raise ValueError( + f"Dont know how to index with {item}, " + "need an int, string, slice, or tuple[int | slice, str]" + ) + + df = pd.concat(tables, axis=1, keys=names) + df.set_index((self.name, "id"), drop=True, inplace=True) + return df + + linkml_meta = LinkMLMeta( { "annotations": { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index e5d4abc..b3d2f89 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container -from pandas import DataFrame, Series +import pandas as pd from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from numpydantic import NDArray, Shape from pydantic import ( @@ -245,21 +245,21 @@ class DynamicTableMixin(BaseModel): def __getitem__(self, item: 
str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload - def __getitem__(self, item: int) -> DataFrame: ... + def __getitem__(self, item: int) -> pd.DataFrame: ... @overload def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... @overload def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, + pd.DataFrame, list, "NDArray", "VectorDataMixin", ]: ... @overload - def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ... def __getitem__( self, @@ -310,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return DataFrame(data) + return pd.DataFrame(data) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -328,7 +328,7 @@ class DynamicTableMixin(BaseModel): # scalars need to be wrapped in series for pandas if not isinstance(rows, (Iterable, slice)): - val = Series([val]) + val = pd.Series([val]) data[k] = val return data @@ -359,6 +359,14 @@ class DynamicTableMixin(BaseModel): except AttributeError: raise e from None + def __len__(self) -> int: + """ + Use the id column to determine length. + + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + @model_validator(mode="before") @classmethod def create_id(cls, model: Dict[str, Any]) -> Dict: @@ -493,6 +501,67 @@ class DynamicTableMixin(BaseModel): ) +class AlignedDynamicTableMixin(DynamicTableMixin): + """ + Mixin to allow indexing multiple tables that are aligned on a common ID + """ + + __pydantic_extra__: Dict[str, "DynamicTableMixin"] + + NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( + "name", + "categories", + "colnames", + "description", + ) + + name: str = "aligned_table" + categories: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None + + @property + def _categories(self) -> Dict[str, "DynamicTableMixin"]: + return {k: getattr(self, k) for i, k in enumerate(self.categories)} + + def __getitem__( + self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + ) -> pd.DataFrame: + """ + Mimic hdmf: + + https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261 + Args: + item: + + Returns: + + """ + if isinstance(item, str): + # get a single table + return self._categories[item][:] + elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): + # get a slice of a single table + return self._categories[item[1]][item[0]] + elif isinstance(item, (int, slice)): + # get a slice of all the tables + ids = self.id[item] + if not isinstance(ids, Iterable): + ids = pd.Series([ids]) + ids = pd.DataFrame({"id": ids}) + tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + names = [self.name] + self.categories + # construct below in case we need to support array indexing in the future + else: + raise ValueError( + f"Dont know how to index with {item}, " + "need an int, string, slice, or tuple[int | slice, str]" + ) + + df = pd.concat(tables, axis=1, keys=names) + df.set_index((self.name, "id"), drop=True, inplace=True) + return df + + linkml_meta = LinkMLMeta( { "annotations": { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 46796a1..53e9996 100644 --- 
a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container -from pandas import DataFrame, Series +import pandas as pd from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from pydantic import ( BaseModel, @@ -245,21 +245,21 @@ class DynamicTableMixin(BaseModel): def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload - def __getitem__(self, item: int) -> DataFrame: ... + def __getitem__(self, item: int) -> pd.DataFrame: ... @overload def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... @overload def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, + pd.DataFrame, list, "NDArray", "VectorDataMixin", ]: ... @overload - def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ... def __getitem__( self, @@ -310,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return DataFrame(data) + return pd.DataFrame(data) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -328,7 +328,7 @@ class DynamicTableMixin(BaseModel): # scalars need to be wrapped in series for pandas if not isinstance(rows, (Iterable, slice)): - val = Series([val]) + val = pd.Series([val]) data[k] = val return data @@ -359,6 +359,14 @@ class DynamicTableMixin(BaseModel): except AttributeError: raise e from None + def __len__(self) -> int: + """ + Use the id column to determine length. 
+ + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + @model_validator(mode="before") @classmethod def create_id(cls, model: Dict[str, Any]) -> Dict: @@ -493,6 +501,67 @@ class DynamicTableMixin(BaseModel): ) +class AlignedDynamicTableMixin(DynamicTableMixin): + """ + Mixin to allow indexing multiple tables that are aligned on a common ID + """ + + __pydantic_extra__: Dict[str, "DynamicTableMixin"] + + NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( + "name", + "categories", + "colnames", + "description", + ) + + name: str = "aligned_table" + categories: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None + + @property + def _categories(self) -> Dict[str, "DynamicTableMixin"]: + return {k: getattr(self, k) for i, k in enumerate(self.categories)} + + def __getitem__( + self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + ) -> pd.DataFrame: + """ + Mimic hdmf: + + https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261 + Args: + item: + + Returns: + + """ + if isinstance(item, str): + # get a single table + return self._categories[item][:] + elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): + # get a slice of a single table + return self._categories[item[1]][item[0]] + elif isinstance(item, (int, slice)): + # get a slice of all the tables + ids = self.id[item] + if not isinstance(ids, Iterable): + ids = pd.Series([ids]) + ids = pd.DataFrame({"id": ids}) + tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + names = [self.name] + self.categories + # construct below in case we need to support array indexing in the future + else: + raise ValueError( + f"Dont know how to index with {item}, " + "need an int, string, slice, or tuple[int | slice, str]" + ) + + df = pd.concat(tables, axis=1, keys=names) + df.set_index((self.name, "id"), drop=True, inplace=True) + return df + + linkml_meta = LinkMLMeta( { "annotations": { @@ -617,7 +686,7 @@ class DynamicTable(DynamicTableMixin): ) -class AlignedDynamicTable(DynamicTable): +class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): """ DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. 
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 9880ee8..641f20c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container -from pandas import DataFrame, Series +import pandas as pd from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from pydantic import ( BaseModel, @@ -245,21 +245,21 @@ class DynamicTableMixin(BaseModel): def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload - def __getitem__(self, item: int) -> DataFrame: ... + def __getitem__(self, item: int) -> pd.DataFrame: ... @overload def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... @overload def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, + pd.DataFrame, list, "NDArray", "VectorDataMixin", ]: ... @overload - def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ... def __getitem__( self, @@ -310,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return DataFrame(data) + return pd.DataFrame(data) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -328,7 +328,7 @@ class DynamicTableMixin(BaseModel): # scalars need to be wrapped in series for pandas if not isinstance(rows, (Iterable, slice)): - val = Series([val]) + val = pd.Series([val]) data[k] = val return data @@ -359,6 +359,14 @@ class DynamicTableMixin(BaseModel): except AttributeError: raise e from None + def __len__(self) -> int: + """ + Use the id column to determine length. 
+ + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + @model_validator(mode="before") @classmethod def create_id(cls, model: Dict[str, Any]) -> Dict: @@ -493,6 +501,67 @@ class DynamicTableMixin(BaseModel): ) +class AlignedDynamicTableMixin(DynamicTableMixin): + """ + Mixin to allow indexing multiple tables that are aligned on a common ID + """ + + __pydantic_extra__: Dict[str, "DynamicTableMixin"] + + NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( + "name", + "categories", + "colnames", + "description", + ) + + name: str = "aligned_table" + categories: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None + + @property + def _categories(self) -> Dict[str, "DynamicTableMixin"]: + return {k: getattr(self, k) for i, k in enumerate(self.categories)} + + def __getitem__( + self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + ) -> pd.DataFrame: + """ + Mimic hdmf: + + https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261 + Args: + item: + + Returns: + + """ + if isinstance(item, str): + # get a single table + return self._categories[item][:] + elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): + # get a slice of a single table + return self._categories[item[1]][item[0]] + elif isinstance(item, (int, slice)): + # get a slice of all the tables + ids = self.id[item] + if not isinstance(ids, Iterable): + ids = pd.Series([ids]) + ids = pd.DataFrame({"id": ids}) + tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + names = [self.name] + self.categories + # construct below in case we need to support array indexing in the future + else: + raise ValueError( + f"Dont know how to index with {item}, " + "need an int, string, slice, or tuple[int | slice, str]" + ) + + df = pd.concat(tables, axis=1, keys=names) + df.set_index((self.name, "id"), drop=True, inplace=True) + return df + + linkml_meta = LinkMLMeta( { "annotations": { @@ -617,7 +686,7 @@ class DynamicTable(DynamicTableMixin): ) -class AlignedDynamicTable(DynamicTable): +class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): """ DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. 
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index 158f8c1..45e4269 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container -from pandas import DataFrame, Series +import pandas as pd from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from pydantic import ( BaseModel, @@ -245,21 +245,21 @@ class DynamicTableMixin(BaseModel): def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload - def __getitem__(self, item: int) -> DataFrame: ... + def __getitem__(self, item: int) -> pd.DataFrame: ... @overload def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... @overload def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, + pd.DataFrame, list, "NDArray", "VectorDataMixin", ]: ... @overload - def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ... def __getitem__( self, @@ -310,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return DataFrame(data) + return pd.DataFrame(data) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -328,7 +328,7 @@ class DynamicTableMixin(BaseModel): # scalars need to be wrapped in series for pandas if not isinstance(rows, (Iterable, slice)): - val = Series([val]) + val = pd.Series([val]) data[k] = val return data @@ -359,6 +359,14 @@ class DynamicTableMixin(BaseModel): except AttributeError: raise e from None + def __len__(self) -> int: + """ + Use the id column to determine length. 
+ + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + @model_validator(mode="before") @classmethod def create_id(cls, model: Dict[str, Any]) -> Dict: @@ -493,6 +501,67 @@ class DynamicTableMixin(BaseModel): ) +class AlignedDynamicTableMixin(DynamicTableMixin): + """ + Mixin to allow indexing multiple tables that are aligned on a common ID + """ + + __pydantic_extra__: Dict[str, "DynamicTableMixin"] + + NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( + "name", + "categories", + "colnames", + "description", + ) + + name: str = "aligned_table" + categories: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None + + @property + def _categories(self) -> Dict[str, "DynamicTableMixin"]: + return {k: getattr(self, k) for i, k in enumerate(self.categories)} + + def __getitem__( + self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + ) -> pd.DataFrame: + """ + Mimic hdmf: + + https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261 + Args: + item: + + Returns: + + """ + if isinstance(item, str): + # get a single table + return self._categories[item][:] + elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): + # get a slice of a single table + return self._categories[item[1]][item[0]] + elif isinstance(item, (int, slice)): + # get a slice of all the tables + ids = self.id[item] + if not isinstance(ids, Iterable): + ids = pd.Series([ids]) + ids = pd.DataFrame({"id": ids}) + tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + names = [self.name] + self.categories + # construct below in case we need to support array indexing in the future + else: + raise ValueError( + f"Dont know how to index with {item}, " + "need an int, string, slice, or tuple[int | slice, str]" + ) + + df = pd.concat(tables, axis=1, keys=names) + df.set_index((self.name, "id"), drop=True, inplace=True) + return df + + linkml_meta = LinkMLMeta( { "annotations": { @@ -617,7 +686,7 @@ class DynamicTable(DynamicTableMixin): ) -class AlignedDynamicTable(DynamicTable): +class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): """ DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. 
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index 3ffb25d..0ca3130 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container -from pandas import DataFrame, Series +import pandas as pd from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from pydantic import ( BaseModel, @@ -245,21 +245,21 @@ class DynamicTableMixin(BaseModel): def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload - def __getitem__(self, item: int) -> DataFrame: ... + def __getitem__(self, item: int) -> pd.DataFrame: ... @overload def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... @overload def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, + pd.DataFrame, list, "NDArray", "VectorDataMixin", ]: ... @overload - def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ... def __getitem__( self, @@ -310,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return DataFrame(data) + return pd.DataFrame(data) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -328,7 +328,7 @@ class DynamicTableMixin(BaseModel): # scalars need to be wrapped in series for pandas if not isinstance(rows, (Iterable, slice)): - val = Series([val]) + val = pd.Series([val]) data[k] = val return data @@ -359,6 +359,14 @@ class DynamicTableMixin(BaseModel): except AttributeError: raise e from None + def __len__(self) -> int: + """ + Use the id column to determine length. 
+ + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + @model_validator(mode="before") @classmethod def create_id(cls, model: Dict[str, Any]) -> Dict: @@ -493,6 +501,67 @@ class DynamicTableMixin(BaseModel): ) +class AlignedDynamicTableMixin(DynamicTableMixin): + """ + Mixin to allow indexing multiple tables that are aligned on a common ID + """ + + __pydantic_extra__: Dict[str, "DynamicTableMixin"] + + NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( + "name", + "categories", + "colnames", + "description", + ) + + name: str = "aligned_table" + categories: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None + + @property + def _categories(self) -> Dict[str, "DynamicTableMixin"]: + return {k: getattr(self, k) for i, k in enumerate(self.categories)} + + def __getitem__( + self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + ) -> pd.DataFrame: + """ + Mimic hdmf: + + https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261 + Args: + item: + + Returns: + + """ + if isinstance(item, str): + # get a single table + return self._categories[item][:] + elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): + # get a slice of a single table + return self._categories[item[1]][item[0]] + elif isinstance(item, (int, slice)): + # get a slice of all the tables + ids = self.id[item] + if not isinstance(ids, Iterable): + ids = pd.Series([ids]) + ids = pd.DataFrame({"id": ids}) + tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + names = [self.name] + self.categories + # construct below in case we need to support array indexing in the future + else: + raise ValueError( + f"Dont know how to index with {item}, " + "need an int, string, slice, or tuple[int | slice, str]" + ) + + df = pd.concat(tables, axis=1, keys=names) + df.set_index((self.name, "id"), drop=True, inplace=True) + return df + + linkml_meta = LinkMLMeta( { "annotations": { @@ -617,7 +686,7 @@ class DynamicTable(DynamicTableMixin): ) -class AlignedDynamicTable(DynamicTable): +class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): """ DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. 
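# The __len__ contract above in miniature: length always delegates to the id
# column, however many other columns exist. This toy class is illustrative
# only, not one of the generated models.
import numpy as np

class ToyTable:
    def __init__(self, id: np.ndarray, **columns: np.ndarray):
        self.id = id
        self.columns = columns

    def __len__(self) -> int:
        return len(self.id)

assert len(ToyTable(id=np.arange(5), x=np.zeros(5), y=np.ones(5))) == 5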
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index de43571..d6e1081 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -5,7 +5,7 @@ from enum import Enum import re import sys from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container -from pandas import DataFrame, Series +import pandas as pd from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload from pydantic import ( BaseModel, @@ -245,21 +245,21 @@ class DynamicTableMixin(BaseModel): def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @overload - def __getitem__(self, item: int) -> DataFrame: ... + def __getitem__(self, item: int) -> pd.DataFrame: ... @overload def __getitem__(self, item: Tuple[int, Union[int, str]]) -> Any: ... @overload def __getitem__(self, item: Tuple[Union[int, slice], ...]) -> Union[ - DataFrame, + pd.DataFrame, list, "NDArray", "VectorDataMixin", ]: ... @overload - def __getitem__(self, item: Union[slice, "NDArray"]) -> DataFrame: ... + def __getitem__(self, item: Union[slice, "NDArray"]) -> pd.DataFrame: ... def __getitem__( self, @@ -310,7 +310,7 @@ class DynamicTableMixin(BaseModel): raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return DataFrame(data) + return pd.DataFrame(data) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -328,7 +328,7 @@ class DynamicTableMixin(BaseModel): # scalars need to be wrapped in series for pandas if not isinstance(rows, (Iterable, slice)): - val = Series([val]) + val = pd.Series([val]) data[k] = val return data @@ -359,6 +359,14 @@ class DynamicTableMixin(BaseModel): except AttributeError: raise e from None + def __len__(self) -> int: + """ + Use the id column to determine length. 
+ + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + @model_validator(mode="before") @classmethod def create_id(cls, model: Dict[str, Any]) -> Dict: @@ -493,6 +501,67 @@ class DynamicTableMixin(BaseModel): ) +class AlignedDynamicTableMixin(DynamicTableMixin): + """ + Mixin to allow indexing multiple tables that are aligned on a common ID + """ + + __pydantic_extra__: Dict[str, "DynamicTableMixin"] + + NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( + "name", + "categories", + "colnames", + "description", + ) + + name: str = "aligned_table" + categories: List[str] = Field(default_factory=list) + id: Optional[NDArray[Shape["* num_rows"], int]] = None + + @property + def _categories(self) -> Dict[str, "DynamicTableMixin"]: + return {k: getattr(self, k) for i, k in enumerate(self.categories)} + + def __getitem__( + self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + ) -> pd.DataFrame: + """ + Mimic hdmf: + + https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/common/alignedtable.py#L261 + Args: + item: + + Returns: + + """ + if isinstance(item, str): + # get a single table + return self._categories[item][:] + elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): + # get a slice of a single table + return self._categories[item[1]][item[0]] + elif isinstance(item, (int, slice)): + # get a slice of all the tables + ids = self.id[item] + if not isinstance(ids, Iterable): + ids = pd.Series([ids]) + ids = pd.DataFrame({"id": ids}) + tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + names = [self.name] + self.categories + # construct below in case we need to support array indexing in the future + else: + raise ValueError( + f"Dont know how to index with {item}, " + "need an int, string, slice, or tuple[int | slice, str]" + ) + + df = pd.concat(tables, axis=1, keys=names) + df.set_index((self.name, "id"), drop=True, inplace=True) + return df + + linkml_meta = LinkMLMeta( { "annotations": { @@ -617,7 +686,7 @@ class DynamicTable(DynamicTableMixin): ) -class AlignedDynamicTable(DynamicTable): +class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): """ DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. 
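# A sketch of the typed-extras pattern the mixin above relies on: annotating
# __pydantic_extra__ makes pydantic v2 validate extra fields against that type,
# which is how arbitrarily-named category tables get attached. SubTable and
# Aligned are stand-ins, not the generated models.
from typing import Dict
from pydantic import BaseModel, ConfigDict

class SubTable(BaseModel):
    name: str

class Aligned(BaseModel):
    model_config = ConfigDict(extra="allow")
    __pydantic_extra__: Dict[str, SubTable]

aligned = Aligned(units=SubTable(name="units"))  # extra field, validated as SubTable
assert aligned.units.name == "units"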
""" From c09b633cdaba1225bd49e3ad92288c4708c692f9 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 8 Aug 2024 00:32:24 -0700 Subject: [PATCH 35/61] lint --- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index addc32c..77b484c 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -16,9 +16,9 @@ from typing import ( ) import numpy as np +import pandas as pd from linkml.generators.pydanticgen.template import Import, Imports, ObjectImport from numpydantic import NDArray, Shape -import pandas as pd from pydantic import ( BaseModel, ConfigDict, @@ -236,7 +236,7 @@ class DynamicTableMixin(BaseModel): return model @model_validator(mode="after") - def cast_extra_columns(self): + def cast_extra_columns(self) -> "DynamicTableMixin": """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. From 0452a4359fa123c8fec948291aa8570b3d2426c0 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 12 Aug 2024 18:48:59 -0700 Subject: [PATCH 36/61] add logging. less janky adapter instantiation using model validators. correctly propagate properties from ancestor classes when building --- nwb_linkml/pyproject.toml | 4 +- nwb_linkml/src/nwb_linkml/adapters/adapter.py | 42 ++++-- nwb_linkml/src/nwb_linkml/adapters/classes.py | 9 +- nwb_linkml/src/nwb_linkml/adapters/dataset.py | 11 ++ .../src/nwb_linkml/adapters/namespaces.py | 60 ++++++--- nwb_linkml/src/nwb_linkml/adapters/schema.py | 3 +- nwb_linkml/src/nwb_linkml/config.py | 69 ++++++++++ nwb_linkml/src/nwb_linkml/io/schema.py | 10 +- nwb_linkml/src/nwb_linkml/logging.py | 100 +++++++++++++++ nwb_linkml/tests/fixtures.py | 1 - .../test_adapters/test_adapter_namespaces.py | 14 ++ nwb_linkml/tests/test_generate.py | 2 + nwb_linkml/tests/test_includes/conftest.py | 120 ++++++++++++++++++ nwb_linkml/tests/test_includes/test_hdmf.py | 92 +------------- .../datamodel/nwb_schema_pydantic.py | 18 +-- 15 files changed, 415 insertions(+), 140 deletions(-) create mode 100644 nwb_linkml/src/nwb_linkml/logging.py create mode 100644 nwb_linkml/tests/test_includes/conftest.py diff --git a/nwb_linkml/pyproject.toml b/nwb_linkml/pyproject.toml index 6efc111..97e4cce 100644 --- a/nwb_linkml/pyproject.toml +++ b/nwb_linkml/pyproject.toml @@ -74,7 +74,9 @@ addopts = [ ] markers = [ "dev: tests that are just for development rather than testing correctness", - "provider: tests for providers!" 
+ "provider: tests for providers!", + "linkml: tests related to linkml generation", + "pydantic: tests related to pydantic generation" ] testpaths = [ "src/nwb_linkml", diff --git a/nwb_linkml/src/nwb_linkml/adapters/adapter.py b/nwb_linkml/src/nwb_linkml/adapters/adapter.py index 72f4248..e09e68f 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/adapter.py +++ b/nwb_linkml/src/nwb_linkml/adapters/adapter.py @@ -5,16 +5,8 @@ Base class for adapters import sys from abc import abstractmethod from dataclasses import dataclass, field -from typing import ( - Any, - Generator, - List, - Optional, - Tuple, - Type, - TypeVar, - Union, -) +from typing import Any, Generator, List, Literal, Optional, Tuple, Type, TypeVar, Union, overload +from logging import Logger from linkml_runtime.dumpers import yaml_dumper from linkml_runtime.linkml_model import ( @@ -26,6 +18,7 @@ from linkml_runtime.linkml_model import ( ) from pydantic import BaseModel +from nwb_linkml.logging import init_logger from nwb_schema_language import Attribute, CompoundDtype, Dataset, Group, Schema if sys.version_info.minor >= 11: @@ -107,6 +100,14 @@ class BuildResult: class Adapter(BaseModel): """Abstract base class for adapters""" + _logger: Optional[Logger] = None + + @property + def logger(self) -> Logger: + if self._logger is None: + self._logger = init_logger(self.__class__.__name__) + return self._logger + @abstractmethod def build(self) -> "BuildResult": """ @@ -152,8 +153,8 @@ class Adapter(BaseModel): # SchemaAdapters that should be located under the same # NamespacesAdapter when it's important to query across SchemaAdapters, # so skip to avoid combinatoric walking - if key == "imports" and type(input).__name__ == "SchemaAdapter": - continue + # if key == "imports" and type(input).__name__ == "SchemaAdapter": + # continue val = getattr(input, key) yield (key, val) if isinstance(val, (BaseModel, dict, list)): @@ -196,6 +197,14 @@ class Adapter(BaseModel): if isinstance(item, tuple) and item[0] in field and item[1] is not None: yield item[1] + @overload + def walk_field_values( + self, + input: Union[BaseModel, dict, list], + field: Literal["neurodata_type_def"], + value: Optional[Any] = None, + ) -> Generator[Group | Dataset, None, None]: ... 
+ def walk_field_values( self, input: Union[BaseModel, dict, list], field: str, value: Optional[Any] = None ) -> Generator[BaseModel, None, None]: @@ -248,6 +257,9 @@ def is_1d(cls: Dataset | Attribute) -> bool: * a single-layer dim/shape list of length 1, or * a nested dim/shape list where every nested spec is of length 1 """ + if cls.dims is None: + return False + return ( not any([isinstance(dim, list) for dim in cls.dims]) and len(cls.dims) == 1 ) or ( # nested list @@ -270,4 +282,8 @@ def has_attrs(cls: Dataset) -> bool: """ Check if a dataset has any attributes at all without defaults """ - return len(cls.attributes) > 0 and all([not a.value for a in cls.attributes]) + return ( + cls.attributes is not None + and len(cls.attributes) > 0 + and all([not a.value for a in cls.attributes]) + ) diff --git a/nwb_linkml/src/nwb_linkml/adapters/classes.py b/nwb_linkml/src/nwb_linkml/adapters/classes.py index 054a401..0097e47 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/classes.py +++ b/nwb_linkml/src/nwb_linkml/adapters/classes.py @@ -119,9 +119,12 @@ class ClassAdapter(Adapter): Returns: list[:class:`.SlotDefinition`] """ - results = [AttributeAdapter(cls=attr).build() for attr in cls.attributes] - slots = [r.slots[0] for r in results] - return slots + if cls.attributes is not None: + results = [AttributeAdapter(cls=attr).build() for attr in cls.attributes] + slots = [r.slots[0] for r in results] + return slots + else: + return [] def _get_full_name(self) -> str: """The full name of the object in the generated linkml diff --git a/nwb_linkml/src/nwb_linkml/adapters/dataset.py b/nwb_linkml/src/nwb_linkml/adapters/dataset.py index 3a49798..2490ef5 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/dataset.py +++ b/nwb_linkml/src/nwb_linkml/adapters/dataset.py @@ -784,6 +784,12 @@ class MapCompoundDtype(DatasetMap): Make a new class for this dtype, using its sub-dtypes as fields, and use it as the range for the parent class """ + # all the slots share the same ndarray spec if there is one + array = {} + if cls.dims or cls.shape: + array_adapter = ArrayAdapter(cls.dims, cls.shape) + array = array_adapter.make_slot() + slots = {} for a_dtype in cls.dtype: slots[a_dtype.name] = SlotDefinition( @@ -791,8 +797,13 @@ class MapCompoundDtype(DatasetMap): description=a_dtype.doc, range=handle_dtype(a_dtype.dtype), **QUANTITY_MAP[cls.quantity], + **array, ) res.classes[0].attributes.update(slots) + + # the compound dtype replaces the ``value`` slot, if present + if "value" in res.classes[0].attributes: + del res.classes[0].attributes["value"] return res diff --git a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py index f8ea857..59194e4 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py +++ b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py @@ -13,13 +13,13 @@ from typing import Dict, List, Optional from linkml_runtime.dumpers import yaml_dumper from linkml_runtime.linkml_model import Annotation, SchemaDefinition -from pydantic import Field, PrivateAttr +from pydantic import Field, model_validator from nwb_linkml.adapters.adapter import Adapter, BuildResult from nwb_linkml.adapters.schema import SchemaAdapter from nwb_linkml.lang_elements import NwbLangSchema from nwb_linkml.ui import AdapterProgress -from nwb_schema_language import Namespaces +from nwb_schema_language import Namespaces, Group, Dataset class NamespacesAdapter(Adapter): @@ -31,12 +31,6 @@ class NamespacesAdapter(Adapter): schemas: List[SchemaAdapter] imported: 
List["NamespacesAdapter"] = Field(default_factory=list) - _imports_populated: bool = PrivateAttr(False) - - def __init__(self, **kwargs: dict): - super().__init__(**kwargs) - self._populate_schema_namespaces() - @classmethod def from_yaml(cls, path: Path) -> "NamespacesAdapter": """ @@ -70,8 +64,6 @@ class NamespacesAdapter(Adapter): """ Build the NWB namespace to the LinkML Schema """ - if not self._imports_populated and not skip_imports: - self.populate_imports() sch_result = BuildResult() for sch in self.schemas: @@ -129,6 +121,7 @@ class NamespacesAdapter(Adapter): return sch_result + @model_validator(mode="after") def _populate_schema_namespaces(self) -> None: """ annotate for each schema which namespace imports it @@ -143,6 +136,7 @@ class NamespacesAdapter(Adapter): sch.namespace = ns.name sch.version = ns.version break + return self def find_type_source(self, name: str) -> SchemaAdapter: """ @@ -182,7 +176,8 @@ class NamespacesAdapter(Adapter): else: raise KeyError(f"No schema found that define {name}") - def populate_imports(self) -> None: + @model_validator(mode="after") + def populate_imports(self) -> "NamespacesAdapter": """ Populate the imports that are needed for each schema file @@ -199,11 +194,46 @@ class NamespacesAdapter(Adapter): if depends_on not in sch.imports: sch.imports.append(depends_on) - # do so recursively - for imported in self.imported: - imported.populate_imports() + return self - self._imports_populated = True + @model_validator(mode="after") + def _populate_inheritance(self): + """ + ensure properties from `neurodata_type_inc` are propaged through to inheriting classes. + + This seems super expensive but we'll optimize for perf later if that proves to be the case + """ + # don't use walk_types here so we can replace the objects as we mutate them + for sch in self.schemas: + for i, group in enumerate(sch.groups): + if getattr(group, "neurodata_type_inc", None) is not None: + merged_attrs = self._merge_inheritance(group) + sch.groups[i] = Group(**merged_attrs) + for i, dataset in enumerate(sch.datasets): + if getattr(dataset, "neurodata_type_inc", None) is not None: + merged_attrs = self._merge_inheritance(dataset) + sch.datasets[i] = Dataset(**merged_attrs) + return self + + def _merge_inheritance(self, obj: Group | Dataset) -> dict: + obj_dict = obj.model_dump(exclude_none=True) + if obj.neurodata_type_inc: + name = obj.neurodata_type_def if obj.neurodata_type_def else obj.name + self.logger.debug(f"Merging {name} with {obj.neurodata_type_inc}") + # there must be only one type with this name + parent: Group | Dataset = next( + self.walk_field_values(self, "neurodata_type_def", obj.neurodata_type_inc) + ) + if obj.neurodata_type_def == "TimeSeriesReferenceVectorData": + pdb.set_trace() + parent_dict = copy(self._merge_inheritance(parent)) + # children don't inherit the type_def + del parent_dict["neurodata_type_def"] + # overwrite with child values + parent_dict.update(obj_dict) + return parent_dict + + return obj_dict def to_yaml(self, base_dir: Path) -> None: """ diff --git a/nwb_linkml/src/nwb_linkml/adapters/schema.py b/nwb_linkml/src/nwb_linkml/adapters/schema.py index 4f03944..e6316b7 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/schema.py +++ b/nwb_linkml/src/nwb_linkml/adapters/schema.py @@ -42,7 +42,8 @@ class SchemaAdapter(Adapter): """ The namespace.schema name for a single schema """ - return ".".join([self.namespace, self.path.with_suffix("").name]) + namespace = self.namespace if self.namespace is not None else "" + return 
".".join([namespace, self.path.with_suffix("").name]) def __repr__(self): out_str = "\n" + self.name + "\n" diff --git a/nwb_linkml/src/nwb_linkml/config.py b/nwb_linkml/src/nwb_linkml/config.py index 8fa84f7..bbfcaed 100644 --- a/nwb_linkml/src/nwb_linkml/config.py +++ b/nwb_linkml/src/nwb_linkml/config.py @@ -2,10 +2,12 @@ Manage the operation of nwb_linkml from environmental variables """ +from typing import Optional, Literal import tempfile from pathlib import Path from pydantic import ( + BaseModel, DirectoryPath, Field, FieldValidationInfo, @@ -15,15 +17,68 @@ from pydantic import ( ) from pydantic_settings import BaseSettings, SettingsConfigDict +LOG_LEVELS = Literal["DEBUG", "INFO", "WARNING", "ERROR"] + + +class LogConfig(BaseModel): + """ + Configuration for logging + """ + + level: LOG_LEVELS = "INFO" + """ + Severity of log messages to process. + """ + level_file: Optional[LOG_LEVELS] = None + """ + Severity for file-based logging. If unset, use ``level`` + """ + level_stdout: Optional[LOG_LEVELS] = "WARNING" + """ + Severity for stream-based logging. If unset, use ``level`` + """ + file_n: int = 5 + """ + Number of log files to rotate through + """ + file_size: int = 2**22 # roughly 4MB + """ + Maximum size of log files (bytes) + """ + + @field_validator("level", "level_file", "level_stdout", mode="before") + @classmethod + def uppercase_levels(cls, value: Optional[str] = None) -> Optional[str]: + """ + Ensure log level strings are uppercased + """ + if value is not None: + value = value.upper() + return value + + @model_validator(mode="after") + def inherit_base_level(self) -> "LogConfig": + """ + If loglevels for specific output streams are unset, set from base :attr:`.level` + """ + levels = ("level_file", "level_stdout") + for level_name in levels: + if getattr(self, level_name) is None: + setattr(self, level_name, self.level) + return self + class Config(BaseSettings): """ Configuration for nwb_linkml, populated by default but can be overridden by environment variables. + Nested models can be assigned from .env files with a __ (see examples) + Examples: export NWB_LINKML_CACHE_DIR="/home/mycache/dir" + export NWB_LINKML_LOGS__LEVEL="debug" """ @@ -32,6 +87,11 @@ class Config(BaseSettings): default_factory=lambda: Path(tempfile.gettempdir()) / "nwb_linkml__cache", description="Location to cache generated schema and models", ) + log_dir: Path = Field( + Path("logs"), + description="Location to store logs. 
If a relative directory, relative to ``cache_dir``", + ) + logs: LogConfig = Field(LogConfig(), description="Log configuration") @computed_field @property @@ -62,6 +122,15 @@ class Config(BaseSettings): assert v.exists() return v + @model_validator(mode="after") + def log_dir_relative_to_cache_dir(self) -> "Config": + """ + If log dir is relative, put it beneath the cache_dir + """ + if not self.log_dir.is_absolute(): + self.log_dir = self.cache_dir / self.log_dir + return self + @model_validator(mode="after") def folders_exist(self) -> "Config": """ diff --git a/nwb_linkml/src/nwb_linkml/io/schema.py b/nwb_linkml/src/nwb_linkml/io/schema.py index a162856..954fb3a 100644 --- a/nwb_linkml/src/nwb_linkml/io/schema.py +++ b/nwb_linkml/src/nwb_linkml/io/schema.py @@ -70,6 +70,7 @@ def load_namespace_adapter( namespace: Path | NamespaceRepo | Namespaces, path: Optional[Path] = None, version: Optional[str] = None, + imported: Optional[list[NamespacesAdapter]] = None, ) -> NamespacesAdapter: """ Load all schema referenced by a namespace file @@ -115,7 +116,10 @@ def load_namespace_adapter( yml_file = (path / schema.source).resolve() sch.append(load_schema_file(yml_file)) - adapter = NamespacesAdapter(namespaces=namespaces, schemas=sch) + if imported is not None: + adapter = NamespacesAdapter(namespaces=namespaces, schemas=sch, imported=imported) + else: + adapter = NamespacesAdapter(namespaces=namespaces, schemas=sch) return adapter @@ -148,8 +152,6 @@ def load_nwb_core( if hdmf_only: schema = hdmf_schema else: - schema = load_namespace_adapter(NWB_CORE_REPO, version=core_version) - - schema.imported.append(hdmf_schema) + schema = load_namespace_adapter(NWB_CORE_REPO, version=core_version, imported=[hdmf_schema]) return schema diff --git a/nwb_linkml/src/nwb_linkml/logging.py b/nwb_linkml/src/nwb_linkml/logging.py new file mode 100644 index 0000000..35e4425 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/logging.py @@ -0,0 +1,100 @@ +""" +Logging factory and handlers +""" + +import logging +from logging.handlers import RotatingFileHandler +from pathlib import Path +from typing import Optional, Union + +from rich.logging import RichHandler + +from nwb_linkml.config import LOG_LEVELS, Config + + +def init_logger( + name: str, + log_dir: Union[Optional[Path], bool] = None, + level: Optional[LOG_LEVELS] = None, + file_level: Optional[LOG_LEVELS] = None, + log_file_n: Optional[int] = None, + log_file_size: Optional[int] = None, +) -> logging.Logger: + """ + Make a logger. + + Log to a set of rotating files in the ``log_dir`` according to ``name`` , + as well as using the :class:`~rich.RichHandler` for pretty-formatted stdout logs. + + Args: + name (str): Name of this logger. Ideally names are hierarchical + and indicate what they are logging for, eg. ``miniscope_io.sdcard`` + and don't contain metadata like timestamps, etc. (which are in the logs) + log_dir (:class:`pathlib.Path`): Directory to store file-based logs in. If ``None``, + get from :class:`.Config`. If ``False`` , disable file logging. + level (:class:`.LOG_LEVELS`): Level to use for stdout logging. If ``None`` , + get from :class:`.Config` + file_level (:class:`.LOG_LEVELS`): Level to use for file-based logging. + If ``None`` , get from :class:`.Config` + log_file_n (int): Number of rotating file logs to use. + If ``None`` , get from :class:`.Config` + log_file_size (int): Maximum size of logfiles before rotation. 
+ If ``None`` , get from :class:`.Config` + + Returns: + :class:`logging.Logger` + """ + config = Config() + if log_dir is None: + log_dir = config.log_dir + if level is None: + level = config.logs.level_stdout + if file_level is None: + file_level = config.logs.level_file + if log_file_n is None: + log_file_n = config.logs.file_n + if log_file_size is None: + log_file_size = config.logs.file_size + + if not name.startswith("nwb_linkml"): + name = "nwb_linkml." + name + + logger = logging.getLogger(name) + logger.setLevel(level) + + # Add handlers for stdout and file + if log_dir is not False: + logger.addHandler(_file_handler(name, file_level, log_dir, log_file_n, log_file_size)) + + logger.addHandler(_rich_handler()) + + return logger + + +def _file_handler( + name: str, + file_level: LOG_LEVELS, + log_dir: Path, + log_file_n: int = 5, + log_file_size: int = 2**22, +) -> RotatingFileHandler: + # See init_logger for arg docs + + filename = Path(log_dir) / ".".join([name, "log"]) + file_handler = RotatingFileHandler( + str(filename), mode="a", maxBytes=log_file_size, backupCount=log_file_n + ) + file_formatter = logging.Formatter("[%(asctime)s] %(levelname)s [%(name)s]: %(message)s") + file_handler.setLevel(file_level) + file_handler.setFormatter(file_formatter) + return file_handler + + +def _rich_handler() -> RichHandler: + rich_handler = RichHandler(rich_tracebacks=True, markup=True) + rich_formatter = logging.Formatter( + "[bold green]\[%(name)s][/bold green] %(message)s", + datefmt="[%y-%m-%dT%H:%M:%S]", + ) + rich_handler.setFormatter(rich_formatter) + return rich_handler diff --git a/nwb_linkml/tests/fixtures.py b/nwb_linkml/tests/fixtures.py index 3ab2d3c..a38e3e0 100644 --- a/nwb_linkml/tests/fixtures.py +++ b/nwb_linkml/tests/fixtures.py @@ -82,7 +82,6 @@ def tmp_output_dir_mod(tmp_output_dir) -> Path: @pytest.fixture(scope="session", params=[{"core_version": "2.7.0", "hdmf_version": "1.8.0"}]) def nwb_core_fixture(request) -> NamespacesAdapter: nwb_core = io.load_nwb_core(**request.param) - nwb_core.populate_imports() assert ( request.param["core_version"] in nwb_core.versions["core"] ) # 2.6.0 is actually 2.6.0-alpha diff --git a/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py b/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py index 5124bdd..bbcb739 100644 --- a/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py +++ b/nwb_linkml/tests/test_adapters/test_adapter_namespaces.py @@ -46,3 +46,17 @@ def test_skip_imports(nwb_core_fixture): # we shouldn't have any of the hdmf-common schema in with us namespaces = [sch.annotations["namespace"].value for sch in res.schemas] assert all([ns == "core" for ns in namespaces]) + + +@pytest.mark.skip() +def test_populate_inheritance(nwb_core_fixture): + """ + Classes should receive and override the properties of their parents + when they have neurodata_type_inc + Args: + nwb_core_fixture: + + Returns: + + """ + pass diff --git a/nwb_linkml/tests/test_generate.py b/nwb_linkml/tests/test_generate.py index 70b08bc..529cdd1 100644 --- a/nwb_linkml/tests/test_generate.py +++ b/nwb_linkml/tests/test_generate.py @@ -76,6 +76,7 @@ def test_generate_pydantic(tmp_output_dir): initfile.write("# Autogenerated module indicator") +@pytest.mark.linkml @pytest.mark.provider @pytest.mark.dev def test_generate_linkml_provider(tmp_output_dir, nwb_core_fixture): @@ -84,6 +85,7 @@ def test_generate_linkml_provider(tmp_output_dir, nwb_core_fixture): result = provider.build(nwb_core_fixture) +@pytest.mark.pydantic @pytest.mark.provider 
@pytest.mark.dev def test_generate_pydantic_provider(tmp_output_dir): diff --git a/nwb_linkml/tests/test_includes/conftest.py b/nwb_linkml/tests/test_includes/conftest.py new file mode 100644 index 0000000..9eacd9f --- /dev/null +++ b/nwb_linkml/tests/test_includes/conftest.py @@ -0,0 +1,120 @@ +from typing import Tuple + +import numpy as np +import pytest + +from nwb_linkml.models import ( + ElectricalSeries, + ExtracellularEphysElectrodes, + Device, + ElectrodeGroup, + DynamicTableRegion, + Units, + IntracellularElectrode, + IntracellularElectrodesTable, + IntracellularResponsesTable, + IntracellularStimuliTable, + IntracellularRecordingsTable, +) + + +@pytest.fixture() +def electrical_series() -> Tuple["ElectricalSeries", "ExtracellularEphysElectrodes"]: + """ + Demo electrical series with adjoining electrodes + """ + n_electrodes = 5 + n_times = 100 + data = np.arange(0, n_electrodes * n_times).reshape(n_times, n_electrodes).astype(float) + timestamps = np.linspace(0, 1, n_times) + + device = Device(name="my electrode") + + # electrode group is the physical description of the electrodes + electrode_group = ElectrodeGroup( + name="GroupA", + device=device, + description="an electrode group", + location="you know where it is", + ) + + # make electrodes tables + electrodes = ExtracellularEphysElectrodes( + description="idk these are also electrodes", + id=np.arange(0, n_electrodes), + x=np.arange(0, n_electrodes).astype(float), + y=np.arange(n_electrodes, n_electrodes * 2).astype(float), + group=[electrode_group] * n_electrodes, + group_name=[electrode_group.name] * n_electrodes, + location=[str(i) for i in range(n_electrodes)], + extra_column=["sup"] * n_electrodes, + ) + + electrical_series = ElectricalSeries( + name="my recording!", + electrodes=DynamicTableRegion( + table=electrodes, + value=np.arange(n_electrodes - 1, -1, step=-1), + name="electrodes", + description="hey", + ), + timestamps=timestamps, + data=data, + ) + return electrical_series, electrodes + + +def _ragged_array(n_units: int) -> tuple[list[np.ndarray], np.ndarray]: + generator = np.random.default_rng() + spike_times = [ + np.full(shape=generator.integers(10, 50), fill_value=i, dtype=float) for i in range(n_units) + ] + spike_idx = [] + for i in range(n_units): + if i == 0: + spike_idx.append(len(spike_times[0])) + else: + spike_idx.append(len(spike_times[i]) + spike_idx[i - 1]) + spike_idx = np.array(spike_idx) + return spike_times, spike_idx + + +@pytest.fixture(params=[True, False]) +def units(request) -> Tuple[Units, list[np.ndarray], np.ndarray]: + """ + Test case for units + + Parameterized by extra_column because pandas likes to pivot dataframes + to long when there is only one column and it's not len() == 1 + """ + spike_times, spike_idx = _ragged_array(24) + + spike_times_flat = np.concatenate(spike_times) + + kwargs = { + "description": "units!!!!", + "spike_times": spike_times_flat, + "spike_times_index": spike_idx, + } + if request.param: + kwargs["extra_column"] = ["hey!"] * 24 + units = Units(**kwargs) + return units, spike_times, spike_idx + + +@pytest.fixture() +def intracellular_recordings_table() -> IntracellularRecordingsTable: + n_recordings = 10 + device = Device(name="my device") + electrode = IntracellularElectrode( + name="my_electrode", description="an electrode", device=device + ) + electrodes = IntracellularElectrodesTable( + name="intracellular_electrodes", electrode=[electrode] * n_recordings + ) + stimuli = IntracellularStimuliTable( + name="intracellular_stimuli", + ) + responses = 
IntracellularResponsesTable() + + recordings_table = IntracellularRecordingsTable() diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index e00c02e..b21e51a 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -1,103 +1,13 @@ -from typing import Tuple - import numpy as np -import pytest # FIXME: Make this just be the output of the provider by patching into import machinery from nwb_linkml.models.pydantic.core.v2_7_0.namespace import ( - Device, DynamicTable, DynamicTableRegion, - ElectricalSeries, ElectrodeGroup, - ExtracellularEphysElectrodes, - Units, VectorIndex, ) - - -@pytest.fixture() -def electrical_series() -> Tuple["ElectricalSeries", "ExtracellularEphysElectrodes"]: - """ - Demo electrical series with adjoining electrodes - """ - n_electrodes = 5 - n_times = 100 - data = np.arange(0, n_electrodes * n_times).reshape(n_times, n_electrodes).astype(float) - timestamps = np.linspace(0, 1, n_times) - - device = Device(name="my electrode") - - # electrode group is the physical description of the electrodes - electrode_group = ElectrodeGroup( - name="GroupA", - device=device, - description="an electrode group", - location="you know where it is", - ) - - # make electrodes tables - electrodes = ExtracellularEphysElectrodes( - description="idk these are also electrodes", - id=np.arange(0, n_electrodes), - x=np.arange(0, n_electrodes).astype(float), - y=np.arange(n_electrodes, n_electrodes * 2).astype(float), - group=[electrode_group] * n_electrodes, - group_name=[electrode_group.name] * n_electrodes, - location=[str(i) for i in range(n_electrodes)], - extra_column=["sup"] * n_electrodes, - ) - - electrical_series = ElectricalSeries( - name="my recording!", - electrodes=DynamicTableRegion( - table=electrodes, - value=np.arange(n_electrodes - 1, -1, step=-1), - name="electrodes", - description="hey", - ), - timestamps=timestamps, - data=data, - ) - return electrical_series, electrodes - - -def _ragged_array(n_units: int) -> tuple[list[np.ndarray], np.ndarray]: - generator = np.random.default_rng() - spike_times = [ - np.full(shape=generator.integers(10, 50), fill_value=i, dtype=float) for i in range(n_units) - ] - spike_idx = [] - for i in range(n_units): - if i == 0: - spike_idx.append(len(spike_times[0])) - else: - spike_idx.append(len(spike_times[i]) + spike_idx[i - 1]) - spike_idx = np.array(spike_idx) - return spike_times, spike_idx - - -@pytest.fixture(params=[True, False]) -def units(request) -> Tuple[Units, list[np.ndarray], np.ndarray]: - """ - Test case for units - - Parameterized by extra_column because pandas likes to pivot dataframes - to long when there is only one column and it's not len() == 1 - """ - spike_times, spike_idx = _ragged_array(24) - - spike_times_flat = np.concatenate(spike_times) - - kwargs = { - "description": "units!!!!", - "spike_times": spike_times_flat, - "spike_times_index": spike_idx, - } - if request.param: - kwargs["extra_column"] = ["hey!"] * 24 - units = Units(**kwargs) - return units, spike_times, spike_idx +from .conftest import _ragged_array def test_dynamictable_indexing(electrical_series): diff --git a/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py b/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py index ef04312..84132d0 100644 --- a/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py +++ 
b/nwb_schema_language/src/nwb_schema_language/datamodel/nwb_schema_pydantic.py @@ -220,8 +220,8 @@ class DtypeMixin(ConfiguredBaseModel): class Attribute(DtypeMixin): name: str = Field(...) - dims: Optional[List[Union[Any, str]]] = Field(default_factory=list) - shape: Optional[List[Union[Any, int, str]]] = Field(default_factory=list) + dims: Optional[List[Union[Any, str]]] = Field(None) + shape: Optional[List[Union[Any, int, str]]] = Field(None) value: Optional[Any] = Field( None, description="""Optional constant, fixed value for the attribute.""" ) @@ -233,9 +233,7 @@ class Attribute(DtypeMixin): True, description="""Optional boolean key describing whether the attribute is required. Default value is True.""", ) - dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field( - default_factory=list - ) + dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(None) class Dataset(DtypeMixin): @@ -250,8 +248,8 @@ class Dataset(DtypeMixin): ) name: Optional[str] = Field(None) default_name: Optional[str] = Field(None) - dims: Optional[List[Union[Any, str]]] = Field(default_factory=list) - shape: Optional[List[Union[Any, int, str]]] = Field(default_factory=list) + dims: Optional[List[Union[Any, str]]] = Field(None) + shape: Optional[List[Union[Any, int, str]]] = Field(None) value: Optional[Any] = Field( None, description="""Optional constant, fixed value for the attribute.""" ) @@ -261,7 +259,5 @@ class Dataset(DtypeMixin): doc: str = Field(..., description="""Description of corresponding object.""") quantity: Optional[Union[QuantityEnum, int]] = Field(1) linkable: Optional[bool] = Field(None) - attributes: Optional[List[Attribute]] = Field(default_factory=list) - dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field( - default_factory=list - ) + attributes: Optional[List[Attribute]] = Field(None) + dtype: Optional[Union[List[CompoundDtype], FlatDtype, ReferenceDtype]] = Field(None) From 994b79e0f2126d2aa04d48e83ddcfb010f4ab52f Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 12 Aug 2024 19:25:12 -0700 Subject: [PATCH 37/61] actually no that's stupid, linkml handles inheritance except for the one special case of compound dtypes which aren't a thing in linkml and are here used exclusively for 1d vectors. --- nwb_linkml/src/nwb_linkml/adapters/dataset.py | 13 +++--- .../src/nwb_linkml/adapters/namespaces.py | 41 +------------------ scripts/generate_core.py | 15 ++++--- 3 files changed, 16 insertions(+), 53 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/adapters/dataset.py b/nwb_linkml/src/nwb_linkml/adapters/dataset.py index 2490ef5..ef5eb61 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/dataset.py +++ b/nwb_linkml/src/nwb_linkml/adapters/dataset.py @@ -742,6 +742,10 @@ class MapCompoundDtype(DatasetMap): We render them just as a class with each of the dtypes as slots - they are typically used by other datasets to create a table. + Since there is exactly one class (``TimeSeriesReferenceVectorData``) that uses compound dtypes + meaningfully, we just hardcode the behavior of inheriting the array shape from the VectorData + parent classes. Otherwise, linkml schemas correctly propagate the ``value`` property. + Eg. ``base.TimeSeriesReferenceVectorData`` .. 
code-block:: yaml @@ -784,24 +788,17 @@ class MapCompoundDtype(DatasetMap): Make a new class for this dtype, using its sub-dtypes as fields, and use it as the range for the parent class """ - # all the slots share the same ndarray spec if there is one - array = {} - if cls.dims or cls.shape: - array_adapter = ArrayAdapter(cls.dims, cls.shape) - array = array_adapter.make_slot() - slots = {} for a_dtype in cls.dtype: slots[a_dtype.name] = SlotDefinition( name=a_dtype.name, description=a_dtype.doc, range=handle_dtype(a_dtype.dtype), + array=ArrayExpression(exact_number_dimensions=1), **QUANTITY_MAP[cls.quantity], - **array, ) res.classes[0].attributes.update(slots) - # the compound dtype replaces the ``value`` slot, if present if "value" in res.classes[0].attributes: del res.classes[0].attributes["value"] return res diff --git a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py index 59194e4..266906e 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/namespaces.py +++ b/nwb_linkml/src/nwb_linkml/adapters/namespaces.py @@ -19,7 +19,7 @@ from nwb_linkml.adapters.adapter import Adapter, BuildResult from nwb_linkml.adapters.schema import SchemaAdapter from nwb_linkml.lang_elements import NwbLangSchema from nwb_linkml.ui import AdapterProgress -from nwb_schema_language import Namespaces, Group, Dataset +from nwb_schema_language import Namespaces class NamespacesAdapter(Adapter): @@ -196,45 +196,6 @@ class NamespacesAdapter(Adapter): return self - @model_validator(mode="after") - def _populate_inheritance(self): - """ - ensure properties from `neurodata_type_inc` are propaged through to inheriting classes. - - This seems super expensive but we'll optimize for perf later if that proves to be the case - """ - # don't use walk_types here so we can replace the objects as we mutate them - for sch in self.schemas: - for i, group in enumerate(sch.groups): - if getattr(group, "neurodata_type_inc", None) is not None: - merged_attrs = self._merge_inheritance(group) - sch.groups[i] = Group(**merged_attrs) - for i, dataset in enumerate(sch.datasets): - if getattr(dataset, "neurodata_type_inc", None) is not None: - merged_attrs = self._merge_inheritance(dataset) - sch.datasets[i] = Dataset(**merged_attrs) - return self - - def _merge_inheritance(self, obj: Group | Dataset) -> dict: - obj_dict = obj.model_dump(exclude_none=True) - if obj.neurodata_type_inc: - name = obj.neurodata_type_def if obj.neurodata_type_def else obj.name - self.logger.debug(f"Merging {name} with {obj.neurodata_type_inc}") - # there must be only one type with this name - parent: Group | Dataset = next( - self.walk_field_values(self, "neurodata_type_def", obj.neurodata_type_inc) - ) - if obj.neurodata_type_def == "TimeSeriesReferenceVectorData": - pdb.set_trace() - parent_dict = copy(self._merge_inheritance(parent)) - # children don't inherit the type_def - del parent_dict["neurodata_type_def"] - # overwrite with child values - parent_dict.update(obj_dict) - return parent_dict - - return obj_dict - def to_yaml(self, base_dir: Path) -> None: """ Build the schemas, saving them to ``yaml`` files according to diff --git a/scripts/generate_core.py b/scripts/generate_core.py index 35faf43..af33c37 100644 --- a/scripts/generate_core.py +++ b/scripts/generate_core.py @@ -104,14 +104,19 @@ def generate_versions( repo.tag = version build_progress.update(linkml_task, advance=1, action="Load Namespaces") - # first load the core namespace - core_ns = io.load_namespace_adapter(repo.namespace_file) if 
repo.namespace == NWB_CORE_REPO: - # then the hdmf-common namespace + # first load HDMF common hdmf_common_ns = io.load_namespace_adapter( repo.temp_directory / "hdmf-common-schema" / "common" / "namespace.yaml" ) - core_ns.imported.append(hdmf_common_ns) + # then load nwb core + core_ns = io.load_namespace_adapter( + repo.namespace_file, imported=[hdmf_common_ns] + ) + + else: + # otherwise just load HDMF + core_ns = io.load_namespace_adapter(repo.namespace_file) build_progress.update(linkml_task, advance=1, action="Build LinkML") @@ -169,7 +174,7 @@ def generate_versions( # import the most recent version of the schemaz we built latest_version = sorted( - (pydantic_path / "pydantic" / "core").iterdir(), key=os.path.getmtime + (pydantic_path / "pydantic" / "core").glob('v*'), key=os.path.getmtime )[-1] # make inits to use the schema! we don't usually do this in the From dd99ac24eb37f67fb61003510b6c042fc72b320c Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 12 Aug 2024 19:25:48 -0700 Subject: [PATCH 38/61] regenerate models --- .../pydantic/core/v2_2_0/core_nwb_ecephys.py | 20 ++++-- .../pydantic/core/v2_2_0/core_nwb_epoch.py | 16 +++-- .../pydantic/core/v2_2_0/core_nwb_icephys.py | 2 +- .../pydantic/core/v2_2_0/core_nwb_misc.py | 2 +- .../pydantic/core/v2_2_0/core_nwb_ophys.py | 2 +- .../core/v2_2_0/core_nwb_retinotopy.py | 2 +- .../pydantic/core/v2_2_1/core_nwb_ecephys.py | 20 ++++-- .../pydantic/core/v2_2_1/core_nwb_epoch.py | 16 +++-- .../pydantic/core/v2_2_1/core_nwb_icephys.py | 2 +- .../pydantic/core/v2_2_1/core_nwb_misc.py | 2 +- .../pydantic/core/v2_2_1/core_nwb_ophys.py | 2 +- .../core/v2_2_1/core_nwb_retinotopy.py | 2 +- .../pydantic/core/v2_2_2/core_nwb_ecephys.py | 20 ++++-- .../pydantic/core/v2_2_2/core_nwb_epoch.py | 14 +++-- .../pydantic/core/v2_2_2/core_nwb_icephys.py | 2 +- .../pydantic/core/v2_2_2/core_nwb_misc.py | 2 +- .../pydantic/core/v2_2_2/core_nwb_ophys.py | 2 +- .../pydantic/core/v2_2_4/core_nwb_ecephys.py | 20 ++++-- .../pydantic/core/v2_2_4/core_nwb_epoch.py | 14 +++-- .../pydantic/core/v2_2_4/core_nwb_icephys.py | 2 +- .../pydantic/core/v2_2_4/core_nwb_misc.py | 2 +- .../pydantic/core/v2_2_4/core_nwb_ophys.py | 44 ++++++++++--- .../pydantic/core/v2_2_5/core_nwb_ecephys.py | 20 ++++-- .../pydantic/core/v2_2_5/core_nwb_epoch.py | 14 +++-- .../pydantic/core/v2_2_5/core_nwb_icephys.py | 2 +- .../pydantic/core/v2_2_5/core_nwb_misc.py | 2 +- .../pydantic/core/v2_2_5/core_nwb_ophys.py | 44 ++++++++++--- .../pydantic/core/v2_3_0/core_nwb_ecephys.py | 20 ++++-- .../pydantic/core/v2_3_0/core_nwb_epoch.py | 14 +++-- .../pydantic/core/v2_3_0/core_nwb_icephys.py | 2 +- .../pydantic/core/v2_3_0/core_nwb_misc.py | 2 +- .../pydantic/core/v2_3_0/core_nwb_ophys.py | 44 ++++++++++--- .../pydantic/core/v2_4_0/core_nwb_base.py | 12 +++- .../pydantic/core/v2_4_0/core_nwb_ecephys.py | 20 ++++-- .../pydantic/core/v2_4_0/core_nwb_epoch.py | 14 +++-- .../pydantic/core/v2_4_0/core_nwb_icephys.py | 2 +- .../pydantic/core/v2_4_0/core_nwb_misc.py | 2 +- .../pydantic/core/v2_4_0/core_nwb_ophys.py | 44 ++++++++++--- .../pydantic/core/v2_5_0/core_nwb_base.py | 14 +++-- .../pydantic/core/v2_5_0/core_nwb_ecephys.py | 20 ++++-- .../pydantic/core/v2_5_0/core_nwb_epoch.py | 2 +- .../pydantic/core/v2_5_0/core_nwb_icephys.py | 2 +- .../pydantic/core/v2_5_0/core_nwb_misc.py | 2 +- .../pydantic/core/v2_5_0/core_nwb_ophys.py | 44 ++++++++++--- .../core/v2_6_0_alpha/core_nwb_base.py | 14 +++-- .../core/v2_6_0_alpha/core_nwb_ecephys.py | 20 ++++-- .../core/v2_6_0_alpha/core_nwb_epoch.py | 2 +- 
.../core/v2_6_0_alpha/core_nwb_icephys.py | 2 +- .../core/v2_6_0_alpha/core_nwb_misc.py | 2 +- .../core/v2_6_0_alpha/core_nwb_ophys.py | 44 ++++++++++--- .../pydantic/core/v2_7_0/core_nwb_base.py | 14 +++-- .../pydantic/core/v2_7_0/core_nwb_ecephys.py | 20 ++++-- .../pydantic/core/v2_7_0/core_nwb_epoch.py | 2 +- .../pydantic/core/v2_7_0/core_nwb_icephys.py | 2 +- .../pydantic/core/v2_7_0/core_nwb_misc.py | 2 +- .../pydantic/core/v2_7_0/core_nwb_ophys.py | 44 ++++++++++--- .../hdmf_common/v1_1_0/hdmf_common_table.py | 2 +- .../hdmf_common/v1_1_2/hdmf_common_table.py | 2 +- .../hdmf_common/v1_1_3/hdmf_common_table.py | 2 +- .../hdmf_common/v1_2_0/hdmf_common_table.py | 2 +- .../hdmf_common/v1_2_1/hdmf_common_table.py | 2 +- .../v1_3_0/hdmf_common_resources.py | 44 +++++++++---- .../hdmf_common/v1_3_0/hdmf_common_table.py | 2 +- .../hdmf_common/v1_4_0/hdmf_common_table.py | 2 +- .../hdmf_common/v1_5_0/hdmf_common_table.py | 2 +- .../hdmf_common/v1_5_1/hdmf_common_table.py | 2 +- .../hdmf_common/v1_6_0/hdmf_common_table.py | 2 +- .../hdmf_common/v1_7_0/hdmf_common_table.py | 2 +- .../hdmf_common/v1_8_0/hdmf_common_table.py | 2 +- .../v0_1_0/hdmf_experimental_resources.py | 59 ++++++++++++++---- .../v0_2_0/hdmf_experimental_resources.py | 62 ++++++++++++++----- .../v0_3_0/hdmf_experimental_resources.py | 56 ++++++++++++----- .../v0_4_0/hdmf_experimental_resources.py | 62 ++++++++++++++----- .../v0_5_0/hdmf_experimental_resources.py | 62 ++++++++++++++----- .../linkml/core/v2_2_0/core.nwb.ecephys.yaml | 6 ++ .../linkml/core/v2_2_0/core.nwb.epoch.yaml | 6 ++ .../linkml/core/v2_2_1/core.nwb.ecephys.yaml | 6 ++ .../linkml/core/v2_2_1/core.nwb.epoch.yaml | 6 ++ .../linkml/core/v2_2_2/core.nwb.ecephys.yaml | 6 ++ .../linkml/core/v2_2_2/core.nwb.epoch.yaml | 6 ++ .../linkml/core/v2_2_4/core.nwb.ecephys.yaml | 6 ++ .../linkml/core/v2_2_4/core.nwb.epoch.yaml | 6 ++ .../linkml/core/v2_2_4/core.nwb.ophys.yaml | 14 +++++ .../linkml/core/v2_2_5/core.nwb.ecephys.yaml | 6 ++ .../linkml/core/v2_2_5/core.nwb.epoch.yaml | 6 ++ .../linkml/core/v2_2_5/core.nwb.ophys.yaml | 14 +++++ .../linkml/core/v2_3_0/core.nwb.ecephys.yaml | 6 ++ .../linkml/core/v2_3_0/core.nwb.epoch.yaml | 6 ++ .../linkml/core/v2_3_0/core.nwb.ophys.yaml | 14 +++++ .../linkml/core/v2_4_0/core.nwb.base.yaml | 6 ++ .../linkml/core/v2_4_0/core.nwb.ecephys.yaml | 6 ++ .../linkml/core/v2_4_0/core.nwb.epoch.yaml | 6 ++ .../linkml/core/v2_4_0/core.nwb.ophys.yaml | 14 +++++ .../linkml/core/v2_5_0/core.nwb.base.yaml | 6 ++ .../linkml/core/v2_5_0/core.nwb.ecephys.yaml | 6 ++ .../linkml/core/v2_5_0/core.nwb.ophys.yaml | 14 +++++ .../core/v2_6_0_alpha/core.nwb.base.yaml | 6 ++ .../core/v2_6_0_alpha/core.nwb.ecephys.yaml | 6 ++ .../core/v2_6_0_alpha/core.nwb.ophys.yaml | 14 +++++ .../linkml/core/v2_7_0/core.nwb.base.yaml | 6 ++ .../linkml/core/v2_7_0/core.nwb.ecephys.yaml | 6 ++ .../linkml/core/v2_7_0/core.nwb.ophys.yaml | 14 +++++ .../v1_3_0/hdmf-common.resources.yaml | 18 ++++++ .../v0_1_0/hdmf-experimental.resources.yaml | 22 +++++++ .../v0_2_0/hdmf-experimental.resources.yaml | 24 +++++++ .../v0_3_0/hdmf-experimental.resources.yaml | 24 +++++++ .../v0_4_0/hdmf-experimental.resources.yaml | 26 ++++++++ .../v0_5_0/hdmf-experimental.resources.yaml | 26 ++++++++ scripts/generate_core.py | 2 +- 109 files changed, 1182 insertions(+), 273 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py index 3e3205f..d1a96d1 100644 --- 
a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py @@ -69,7 +69,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel]) def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]: """Get the name of the slot that refers to this object""" - assert isinstance(item, (BaseModel, dict)) + assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!" name = info.field_name if isinstance(item, BaseModel): item.name = name @@ -411,9 +411,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel): "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"} }, ) - x: Optional[float] = Field(None, description="""x coordinate""") - y: Optional[float] = Field(None, description="""y coordinate""") - z: Optional[float] = Field(None, description="""z coordinate""") + x: Optional[NDArray[Shape["*"], float]] = Field( + None, + description="""x coordinate""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + y: Optional[NDArray[Shape["*"], float]] = Field( + None, + description="""y coordinate""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + z: Optional[NDArray[Shape["*"], float]] = Field( + None, + description="""z coordinate""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) class ClusterWaveforms(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py index d7ec41b..8d0fb5d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py @@ -15,9 +15,9 @@ from pydantic import ( ValidationInfo, BeforeValidator, ) -from numpydantic import NDArray, Shape from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTable, VectorIndex, VectorData from ...core.v2_2_0.core_nwb_base import TimeSeries +from numpydantic import NDArray, Shape metamodel_version = "None" version = "2.2.0" @@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel]) def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]: """Get the name of the slot that refers to this object""" - assert isinstance(item, (BaseModel, dict)) + assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!" name = info.field_name if isinstance(item, BaseModel): item.name = name @@ -181,16 +181,20 @@ class TimeIntervalsTimeseries(VectorData): "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"} }, ) - idx_start: Optional[int] = Field( + idx_start: Optional[NDArray[Shape["*"], int]] = Field( None, description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. 
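# The regenerated shape of these fields in brief: each compound sub-dtype is
# now an exactly-1-dimensional array. A standalone model with the same
# numpydantic annotations; ToyTimeseriesRef is a hypothetical name.
from typing import Optional
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel

class ToyTimeseriesRef(BaseModel):
    idx_start: Optional[NDArray[Shape["*"], int]] = None
    count: Optional[NDArray[Shape["*"], int]] = None

ref = ToyTimeseriesRef(idx_start=np.array([0, 100]), count=np.array([100, 50]))
assert ref.count.shape == (2,)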
The first dimension of those arrays is always time.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - count: Optional[int] = Field( + count: Optional[NDArray[Shape["*"], int]] = Field( None, description="""Number of data samples available in this time series, during this epoch.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - timeseries: Optional[TimeSeries] = Field( - None, description="""the TimeSeries that this index applies to.""" + timeseries: Optional[NDArray[Shape["*"], TimeSeries]] = Field( + None, + description="""the TimeSeries that this index applies to.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) description: str = Field(..., description="""Description of what these vectors represent.""") diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py index 8d42b4d..938bf63 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py @@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel]) def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]: """Get the name of the slot that refers to this object""" - assert isinstance(item, (BaseModel, dict)) + assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!" name = info.field_name if isinstance(item, BaseModel): item.name = name diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py index 3a9affd..6c16a46 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py @@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel]) def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]: """Get the name of the slot that refers to this object""" - assert isinstance(item, (BaseModel, dict)) + assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!" name = info.field_name if isinstance(item, BaseModel): item.name = name diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py index 8a134f9..433c454 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py @@ -70,7 +70,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel]) def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]: """Get the name of the slot that refers to this object""" - assert isinstance(item, (BaseModel, dict)) + assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!" 
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py
index 5cbf6b0..f63232e 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py
@@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py
index d337bbe..92ea135 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py
@@ -69,7 +69,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -411,9 +411,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
         },
     )
-    x: Optional[float] = Field(None, description="""x coordinate""")
-    y: Optional[float] = Field(None, description="""y coordinate""")
-    z: Optional[float] = Field(None, description="""z coordinate""")
+    x: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""x coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""y coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    z: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""z coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
 
 
 class ClusterWaveforms(NWBDataInterface):
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py
index cfd4e53..02fc4d9 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py
@@ -15,9 +15,9 @@ from pydantic import (
     ValidationInfo,
     BeforeValidator,
 )
-from numpydantic import NDArray, Shape
 from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTable, VectorIndex, VectorData
 from ...core.v2_2_1.core_nwb_base import TimeSeries
+from numpydantic import NDArray, Shape
 
 metamodel_version = "None"
 version = "2.2.1"
@@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -181,16 +181,20 @@ class TimeIntervalsTimeseries(VectorData):
             "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
         },
     )
-    idx_start: Optional[int] = Field(
+    idx_start: Optional[NDArray[Shape["*"], int]] = Field(
         None,
         description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    count: Optional[int] = Field(
+    count: Optional[NDArray[Shape["*"], int]] = Field(
         None,
         description="""Number of data samples available in this time series, during this epoch.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    timeseries: Optional[TimeSeries] = Field(
-        None, description="""the TimeSeries that this index applies to."""
+    timeseries: Optional[NDArray[Shape["*"], TimeSeries]] = Field(
+        None,
+        description="""the TimeSeries that this index applies to.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
     description: str = Field(..., description="""Description of what these vectors represent.""")
 
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py
index 5be0500..46321df 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py
@@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py
index 9f114c6..60a591f 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py
@@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py
index 6ad59b8..552d38b 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py
@@ -70,7 +70,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py
index 2c68f1f..7969ed1 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py
@@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py
index fa2e7d2..d757e1a 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py
@@ -69,7 +69,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -411,9 +411,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
         },
     )
-    x: Optional[float] = Field(None, description="""x coordinate""")
-    y: Optional[float] = Field(None, description="""y coordinate""")
-    z: Optional[float] = Field(None, description="""z coordinate""")
+    x: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""x coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""y coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    z: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""z coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
 
 
 class ClusterWaveforms(NWBDataInterface):
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py
index 0464db5..083c41f 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py
@@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -181,16 +181,20 @@ class TimeIntervalsTimeseries(VectorData):
             "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
         },
     )
-    idx_start: Optional[int] = Field(
+    idx_start: Optional[NDArray[Shape["*"], int]] = Field(
         None,
         description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    count: Optional[int] = Field(
+    count: Optional[NDArray[Shape["*"], int]] = Field(
         None,
         description="""Number of data samples available in this time series, during this epoch.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    timeseries: Optional[TimeSeries] = Field(
-        None, description="""the TimeSeries that this index applies to."""
+    timeseries: Optional[NDArray[Shape["*"], TimeSeries]] = Field(
+        None,
+        description="""the TimeSeries that this index applies to.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py
index 29a500f..2b126cd 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py
@@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py
index 7485685..d9914b5 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py
@@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py
index 48ebb24..f4f9f20 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py
@@ -70,7 +70,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
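
The recurring `ElectrodeGroupPosition` hunks swap scalar coordinates for any-length 1-D arrays: numpydantic enforces the dimensionality at validation time, while the `exact_number_dimensions` constraint is carried along as `linkml_meta` in `json_schema_extra`. A rough sketch of what the new annotations accept and reject; `PositionSketch` is a stand-in that omits the generated `name` slot and metadata:

from typing import Optional

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, ValidationError


class PositionSketch(BaseModel):
    x: Optional[NDArray[Shape["*"], float]] = None
    y: Optional[NDArray[Shape["*"], float]] = None
    z: Optional[NDArray[Shape["*"], float]] = None


PositionSketch()                        # all columns stay optional
PositionSketch(x=np.array([1.0, 2.0]))  # any-length 1-D array validates

try:
    PositionSketch(x=np.ones((2, 2)))   # a 2-D array violates the 1-D Shape spec
except ValidationError as e:
    print(e)
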
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py
index 58080df..7bfffac 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py
@@ -69,7 +69,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -411,9 +411,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
         },
     )
-    x: Optional[float] = Field(None, description="""x coordinate""")
-    y: Optional[float] = Field(None, description="""y coordinate""")
-    z: Optional[float] = Field(None, description="""z coordinate""")
+    x: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""x coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""y coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    z: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""z coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
 
 
 class ClusterWaveforms(NWBDataInterface):
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py
index a086155..6138ade 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py
@@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -181,16 +181,20 @@ class TimeIntervalsTimeseries(VectorData):
             "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
         },
     )
-    idx_start: Optional[int] = Field(
+    idx_start: Optional[NDArray[Shape["*"], int]] = Field(
         None,
         description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    count: Optional[int] = Field(
+    count: Optional[NDArray[Shape["*"], int]] = Field(
         None,
         description="""Number of data samples available in this time series, during this epoch.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    timeseries: Optional[TimeSeries] = Field(
-        None, description="""the TimeSeries that this index applies to."""
+    timeseries: Optional[NDArray[Shape["*"], TimeSeries]] = Field(
+        None,
+        description="""the TimeSeries that this index applies to.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py
index 0749cca..d24c902 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py
@@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py
index fda23ec..6d705b1 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py
@@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py
index 01ecf48..9c96489 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py
@@ -75,7 +75,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -410,9 +410,21 @@ class PlaneSegmentationPixelMask(VectorData):
             "linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
         },
     )
-    x: Optional[int] = Field(None, description="""Pixel x-coordinate.""")
-    y: Optional[int] = Field(None, description="""Pixel y-coordinate.""")
-    weight: Optional[float] = Field(None, description="""Weight of the pixel.""")
+    x: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Pixel x-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Pixel y-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    weight: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""Weight of the pixel.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
         Union[
@@ -437,10 +449,26 @@ class PlaneSegmentationVoxelMask(VectorData):
             "linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
         },
     )
-    x: Optional[int] = Field(None, description="""Voxel x-coordinate.""")
-    y: Optional[int] = Field(None, description="""Voxel y-coordinate.""")
-    z: Optional[int] = Field(None, description="""Voxel z-coordinate.""")
-    weight: Optional[float] = Field(None, description="""Weight of the voxel.""")
+    x: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel x-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel y-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    z: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel z-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    weight: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""Weight of the voxel.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
         Union[
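
The pixel- and voxel-mask hunks above follow the same pattern: each component of the compound column (`x`, `y`, `z`, `weight`) becomes its own parallel 1-D vector. Note that `Shape["*"]` pins dimensionality but not length, so equal column lengths are not cross-checked at this type level, as the stand-in below illustrates (a hypothetical reduction of the generated PlaneSegmentationPixelMask, minus its name slot and metadata):

from typing import Optional

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class PixelMaskSketch(BaseModel):
    # stand-ins for the generated PlaneSegmentationPixelMask columns
    x: Optional[NDArray[Shape["*"], int]] = None
    y: Optional[NDArray[Shape["*"], int]] = None
    weight: Optional[NDArray[Shape["*"], float]] = None


# Shape["*"] constrains each column to one dimension of any length,
# so mismatched column lengths still validate at the type level:
PixelMaskSketch(x=np.array([1, 2, 3]), y=np.array([4, 5]))
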
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py
index 4151cf7..4bd8b19 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py
@@ -69,7 +69,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -411,9 +411,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
         },
     )
-    x: Optional[float] = Field(None, description="""x coordinate""")
-    y: Optional[float] = Field(None, description="""y coordinate""")
-    z: Optional[float] = Field(None, description="""z coordinate""")
+    x: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""x coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""y coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    z: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""z coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
 
 
 class ClusterWaveforms(NWBDataInterface):
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py
index 900bbeb..e66cf1a 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py
@@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -181,16 +181,20 @@ class TimeIntervalsTimeseries(VectorData):
             "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
         },
     )
-    idx_start: Optional[int] = Field(
+    idx_start: Optional[NDArray[Shape["*"], int]] = Field(
         None,
         description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    count: Optional[int] = Field(
+    count: Optional[NDArray[Shape["*"], int]] = Field(
         None,
         description="""Number of data samples available in this time series, during this epoch.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    timeseries: Optional[TimeSeries] = Field(
-        None, description="""the TimeSeries that this index applies to."""
+    timeseries: Optional[NDArray[Shape["*"], TimeSeries]] = Field(
+        None,
+        description="""the TimeSeries that this index applies to.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py
index b99d4ca..94cf0cf 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py
@@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py
index 591e521..4fe4673 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py
@@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py
index 3bb6356..9b0d75e 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py
@@ -75,7 +75,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -412,9 +412,21 @@ class PlaneSegmentationPixelMask(VectorData):
             "linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
         },
     )
-    x: Optional[int] = Field(None, description="""Pixel x-coordinate.""")
-    y: Optional[int] = Field(None, description="""Pixel y-coordinate.""")
-    weight: Optional[float] = Field(None, description="""Weight of the pixel.""")
+    x: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Pixel x-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Pixel y-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    weight: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""Weight of the pixel.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
         Union[
@@ -439,10 +451,26 @@ class PlaneSegmentationVoxelMask(VectorData):
             "linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
         },
     )
-    x: Optional[int] = Field(None, description="""Voxel x-coordinate.""")
-    y: Optional[int] = Field(None, description="""Voxel y-coordinate.""")
-    z: Optional[int] = Field(None, description="""Voxel z-coordinate.""")
-    weight: Optional[float] = Field(None, description="""Weight of the voxel.""")
+    x: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel x-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel y-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    z: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel z-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    weight: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""Weight of the voxel.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
         Union[
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py
index c3b57c3..29faa7b 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py
@@ -69,7 +69,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -419,9 +419,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
         },
     )
-    x: Optional[float] = Field(None, description="""x coordinate""")
-    y: Optional[float] = Field(None, description="""y coordinate""")
-    z: Optional[float] = Field(None, description="""z coordinate""")
+    x: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""x coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""y coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    z: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""z coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
 
 
 class ClusterWaveforms(NWBDataInterface):
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py
index d82e78d..b6d6397 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py
@@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -178,16 +178,20 @@ class TimeIntervalsTimeseries(VectorData):
             "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
         },
     )
-    idx_start: Optional[int] = Field(
+    idx_start: Optional[NDArray[Shape["*"], int]] = Field(
         None,
         description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    count: Optional[int] = Field(
+    count: Optional[NDArray[Shape["*"], int]] = Field(
         None,
         description="""Number of data samples available in this time series, during this epoch.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    timeseries: Optional[TimeSeries] = Field(
-        None, description="""the TimeSeries that this index applies to."""
+    timeseries: Optional[NDArray[Shape["*"], TimeSeries]] = Field(
+        None,
+        description="""the TimeSeries that this index applies to.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py
index 6cb3a16..a439eb5 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py
@@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py
index c7336be..14df410 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py
@@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
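
A side note on the `name` slots visible in each of these hunks: the `ifabsent: string(...)` metadata is realized as the pydantic default value, while `equals_string` rides along in `json_schema_extra` rather than being enforced here. A stand-in sketch of the resulting behavior (`NamedSketch` is hypothetical, modeled on the generated fixed-name slots):

from pydantic import BaseModel, Field


class NamedSketch(BaseModel):
    # stand-in for a generated fixed-name slot such as TimeIntervalsTimeseries.name
    name: str = Field(
        "timeseries",
        json_schema_extra={
            "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
        },
    )


assert NamedSketch().name == "timeseries"  # the ifabsent default fills the name in
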
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py
index 1aab4fb..d6c991a 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py
@@ -75,7 +75,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -418,9 +418,21 @@ class PlaneSegmentationPixelMask(VectorData):
             "linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
         },
     )
-    x: Optional[int] = Field(None, description="""Pixel x-coordinate.""")
-    y: Optional[int] = Field(None, description="""Pixel y-coordinate.""")
-    weight: Optional[float] = Field(None, description="""Weight of the pixel.""")
+    x: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Pixel x-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Pixel y-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    weight: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""Weight of the pixel.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
         Union[
@@ -445,10 +457,26 @@ class PlaneSegmentationVoxelMask(VectorData):
             "linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
         },
     )
-    x: Optional[int] = Field(None, description="""Voxel x-coordinate.""")
-    y: Optional[int] = Field(None, description="""Voxel y-coordinate.""")
-    z: Optional[int] = Field(None, description="""Voxel z-coordinate.""")
-    weight: Optional[float] = Field(None, description="""Weight of the voxel.""")
+    x: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel x-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel y-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    z: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel z-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    weight: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""Weight of the voxel.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
         Union[
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py
index c7e83cd..aa29c83 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py
@@ -90,15 +90,21 @@ class TimeSeriesReferenceVectorData(VectorData):
     name: str = Field(
         "timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}}
     )
-    idx_start: int = Field(
+    idx_start: NDArray[Shape["*"], int] = Field(
         ...,
         description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    count: int = Field(
+    count: NDArray[Shape["*"], int] = Field(
         ...,
         description="""Number of data samples available in this time series, during this epoch""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    timeseries: NDArray[Shape["*"], TimeSeries] = Field(
+        ...,
+        description="""The TimeSeries that this index applies to""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    timeseries: TimeSeries = Field(..., description="""The TimeSeries that this index applies to""")
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
         Union[
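
`TimeSeriesReferenceVectorData` is the one place where these columns are required rather than optional, so the switch from scalars to 1-D arrays also changes what a valid instance looks like: `idx_start`, `count`, and `timeseries` must now each be an array, with the linkml array constraint riding along in `json_schema_extra`. A quick introspection check, assuming the generated package is importable under the paths shown in this patch:

from nwb_linkml.models.pydantic.core.v2_4_0.core_nwb_base import (
    TimeSeriesReferenceVectorData,
)

field = TimeSeriesReferenceVectorData.model_fields["idx_start"]
print(field.is_required())  # True: still required, now as an array
print(field.json_schema_extra["linkml_meta"]["array"])
# {'exact_number_dimensions': 1}
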
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py
index 8130e81..30e8b3e 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py
@@ -69,7 +69,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -419,9 +419,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
         },
     )
-    x: Optional[float] = Field(None, description="""x coordinate""")
-    y: Optional[float] = Field(None, description="""y coordinate""")
-    z: Optional[float] = Field(None, description="""z coordinate""")
+    x: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""x coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""y coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    z: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""z coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
 
 
 class ClusterWaveforms(NWBDataInterface):
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py
index 8828e93..1161d72 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py
@@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -178,16 +178,20 @@ class TimeIntervalsTimeseries(VectorData):
             "linkml_meta": {"equals_string": "timeseries", "ifabsent": "string(timeseries)"}
         },
     )
-    idx_start: Optional[int] = Field(
+    idx_start: Optional[NDArray[Shape["*"], int]] = Field(
         None,
         description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    count: Optional[int] = Field(
+    count: Optional[NDArray[Shape["*"], int]] = Field(
         None,
         description="""Number of data samples available in this time series, during this epoch.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    timeseries: Optional[TimeSeries] = Field(
-        None, description="""the TimeSeries that this index applies to."""
+    timeseries: Optional[NDArray[Shape["*"], TimeSeries]] = Field(
+        None,
+        description="""the TimeSeries that this index applies to.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py
index d21047d..11318b3 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py
@@ -75,7 +75,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py
index b9919ef..0828ebf 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py
@@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py
index 47b323c..2871455 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py
@@ -75,7 +75,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -418,9 +418,21 @@ class PlaneSegmentationPixelMask(VectorData):
             "linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
         },
     )
-    x: Optional[int] = Field(None, description="""Pixel x-coordinate.""")
-    y: Optional[int] = Field(None, description="""Pixel y-coordinate.""")
-    weight: Optional[float] = Field(None, description="""Weight of the pixel.""")
+    x: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Pixel x-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Pixel y-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    weight: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""Weight of the pixel.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
         Union[
@@ -445,10 +457,26 @@ class PlaneSegmentationVoxelMask(VectorData):
             "linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
         },
     )
-    x: Optional[int] = Field(None, description="""Voxel x-coordinate.""")
-    y: Optional[int] = Field(None, description="""Voxel y-coordinate.""")
-    z: Optional[int] = Field(None, description="""Voxel z-coordinate.""")
-    weight: Optional[float] = Field(None, description="""Weight of the voxel.""")
+    x: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel x-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel y-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    z: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel z-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    weight: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""Weight of the voxel.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
         Union[
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py
index ab1c061..8ab4e6f 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py
@@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -114,15 +114,21 @@ class TimeSeriesReferenceVectorData(VectorData):
     name: str = Field(
         "timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}}
     )
-    idx_start: int = Field(
+    idx_start: NDArray[Shape["*"], int] = Field(
         ...,
         description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    count: int = Field(
+    count: NDArray[Shape["*"], int] = Field(
         ...,
         description="""Number of data samples available in this time series, during this epoch""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    timeseries: NDArray[Shape["*"], TimeSeries] = Field(
+        ...,
+        description="""The TimeSeries that this index applies to""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    timeseries: TimeSeries = Field(..., description="""The TimeSeries that this index applies to""")
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
         Union[
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py
index 74c571f..f5e12e0 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py
@@ -69,7 +69,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -419,9 +419,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
         },
     )
-    x: Optional[float] = Field(None, description="""x coordinate""")
-    y: Optional[float] = Field(None, description="""y coordinate""")
-    z: Optional[float] = Field(None, description="""z coordinate""")
+    x: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""x coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""y coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    z: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""z coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
 
 
 class ClusterWaveforms(NWBDataInterface):
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py
index 9181482..26b4717 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py
@@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py
index e5ab584..e802bcf 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py
@@ -75,7 +75,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py
index 8a8baaf..1459133 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py
@@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py
index a07c17a..fbc5c57 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py
@@ -75,7 +75,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -418,9 +418,21 @@ class PlaneSegmentationPixelMask(VectorData):
             "linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"}
         },
     )
-    x: Optional[int] = Field(None, description="""Pixel x-coordinate.""")
-    y: Optional[int] = Field(None, description="""Pixel y-coordinate.""")
-    weight: Optional[float] = Field(None, description="""Weight of the pixel.""")
+    x: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Pixel x-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Pixel y-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    weight: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""Weight of the pixel.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
         Union[
@@ -445,10 +457,26 @@ class PlaneSegmentationVoxelMask(VectorData):
             "linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"}
         },
     )
-    x: Optional[int] = Field(None, description="""Voxel x-coordinate.""")
-    y: Optional[int] = Field(None, description="""Voxel y-coordinate.""")
-    z: Optional[int] = Field(None, description="""Voxel z-coordinate.""")
-    weight: Optional[float] = Field(None, description="""Weight of the voxel.""")
+    x: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel x-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel y-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    z: Optional[NDArray[Shape["*"], int]] = Field(
+        None,
+        description="""Voxel z-coordinate.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    weight: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""Weight of the voxel.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
         Union[
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py
index 08f2b79..3151433 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py
@@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -114,15 +114,21 @@ class TimeSeriesReferenceVectorData(VectorData):
     name: str = Field(
         "timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}}
     )
-    idx_start: int = Field(
+    idx_start: NDArray[Shape["*"], int] = Field(
         ...,
         description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    count: int = Field(
+    count: NDArray[Shape["*"], int] = Field(
         ...,
         description="""Number of data samples available in this time series, during this epoch""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    timeseries: NDArray[Shape["*"], TimeSeries] = Field(
+        ...,
+        description="""The TimeSeries that this index applies to""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
     )
-    timeseries: TimeSeries = Field(..., description="""The TimeSeries that this index applies to""")
     description: str = Field(..., description="""Description of what these vectors represent.""")
     value: Optional[
         Union[
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py
index a3371dd..40d3c1a 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py
@@ -69,7 +69,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
@@ -419,9 +419,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel):
             "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"}
         },
     )
-    x: Optional[float] = Field(None, description="""x coordinate""")
-    y: Optional[float] = Field(None, description="""y coordinate""")
-    z: Optional[float] = Field(None, description="""z coordinate""")
+    x: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""x coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    y: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""y coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
+    z: Optional[NDArray[Shape["*"], float]] = Field(
+        None,
+        description="""z coordinate""",
+        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
+    )
 
 
 class ClusterWaveforms(NWBDataInterface):
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py
index 0ec3f6b..92e28fa 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py
@@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py
index ea2c7ec..e6cb759 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py
@@ -75,7 +75,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
     name = info.field_name
     if isinstance(item, BaseModel):
         item.name = name
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py
index a8b3e9a..93d732d 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py
@@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]:
     """Get the name of the slot that refers to this object"""
-    assert isinstance(item, (BaseModel, dict))
+    assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!"
name = info.field_name if isinstance(item, BaseModel): item.name = name diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py index 22d467f..cfc2f60 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py @@ -75,7 +75,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel]) def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]: """Get the name of the slot that refers to this object""" - assert isinstance(item, (BaseModel, dict)) + assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!" name = info.field_name if isinstance(item, BaseModel): item.name = name @@ -517,9 +517,21 @@ class PlaneSegmentationPixelMask(VectorData): "linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"} }, ) - x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") - y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") - weight: Optional[float] = Field(None, description="""Weight of the pixel.""") + x: Optional[NDArray[Shape["*"], int]] = Field( + None, + description="""Pixel x-coordinate.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + y: Optional[NDArray[Shape["*"], int]] = Field( + None, + description="""Pixel y-coordinate.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + weight: Optional[NDArray[Shape["*"], float]] = Field( + None, + description="""Weight of the pixel.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ @@ -544,10 +556,26 @@ class PlaneSegmentationVoxelMask(VectorData): "linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"} }, ) - x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") - y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") - z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") - weight: Optional[float] = Field(None, description="""Weight of the voxel.""") + x: Optional[NDArray[Shape["*"], int]] = Field( + None, + description="""Voxel x-coordinate.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + y: Optional[NDArray[Shape["*"], int]] = Field( + None, + description="""Voxel y-coordinate.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + z: Optional[NDArray[Shape["*"], int]] = Field( + None, + description="""Voxel z-coordinate.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + weight: Optional[NDArray[Shape["*"], float]] = Field( + None, + description="""Weight of the voxel.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py index 1e2d4a5..f84cee5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py +++ 
b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py @@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel]) def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]: """Get the name of the slot that refers to this object""" - assert isinstance(item, (BaseModel, dict)) + assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!" name = info.field_name if isinstance(item, BaseModel): item.name = name @@ -114,15 +114,21 @@ class TimeSeriesReferenceVectorData(VectorData): name: str = Field( "timeseries", json_schema_extra={"linkml_meta": {"ifabsent": "string(timeseries)"}} ) - idx_start: int = Field( + idx_start: NDArray[Shape["*"], int] = Field( ..., description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - count: int = Field( + count: NDArray[Shape["*"], int] = Field( ..., description="""Number of data samples available in this time series, during this epoch""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + timeseries: NDArray[Shape["*"], TimeSeries] = Field( + ..., + description="""The TimeSeries that this index applies to""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - timeseries: TimeSeries = Field(..., description="""The TimeSeries that this index applies to""") description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py index f772257..aed71ed 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py @@ -69,7 +69,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel]) def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]: """Get the name of the slot that refers to this object""" - assert isinstance(item, (BaseModel, dict)) + assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!" 
name = info.field_name if isinstance(item, BaseModel): item.name = name @@ -419,9 +419,21 @@ class ElectrodeGroupPosition(ConfiguredBaseModel): "linkml_meta": {"equals_string": "position", "ifabsent": "string(position)"} }, ) - x: Optional[float] = Field(None, description="""x coordinate""") - y: Optional[float] = Field(None, description="""y coordinate""") - z: Optional[float] = Field(None, description="""z coordinate""") + x: Optional[NDArray[Shape["*"], float]] = Field( + None, + description="""x coordinate""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + y: Optional[NDArray[Shape["*"], float]] = Field( + None, + description="""y coordinate""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + z: Optional[NDArray[Shape["*"], float]] = Field( + None, + description="""z coordinate""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) class ClusterWaveforms(NWBDataInterface): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py index 0eb0390..90c2524 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py @@ -62,7 +62,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel]) def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]: """Get the name of the slot that refers to this object""" - assert isinstance(item, (BaseModel, dict)) + assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!" name = info.field_name if isinstance(item, BaseModel): item.name = name diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py index 577ddb8..93a555a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py @@ -75,7 +75,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel]) def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]: """Get the name of the slot that refers to this object""" - assert isinstance(item, (BaseModel, dict)) + assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!" name = info.field_name if isinstance(item, BaseModel): item.name = name diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py index 57697a3..edf6336 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py @@ -68,7 +68,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel]) def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]: """Get the name of the slot that refers to this object""" - assert isinstance(item, (BaseModel, dict)) + assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!" 
name = info.field_name if isinstance(item, BaseModel): item.name = name diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py index d734f11..6e4b60f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py @@ -75,7 +75,7 @@ ModelType = TypeVar("ModelType", bound=Type[BaseModel]) def _get_name(item: ModelType | dict, info: ValidationInfo) -> Union[ModelType, dict]: """Get the name of the slot that refers to this object""" - assert isinstance(item, (BaseModel, dict)) + assert isinstance(item, (BaseModel, dict)), f"{item} was not a BaseModel or a dict!" name = info.field_name if isinstance(item, BaseModel): item.name = name @@ -517,9 +517,21 @@ class PlaneSegmentationPixelMask(VectorData): "linkml_meta": {"equals_string": "pixel_mask", "ifabsent": "string(pixel_mask)"} }, ) - x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") - y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") - weight: Optional[float] = Field(None, description="""Weight of the pixel.""") + x: Optional[NDArray[Shape["*"], int]] = Field( + None, + description="""Pixel x-coordinate.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + y: Optional[NDArray[Shape["*"], int]] = Field( + None, + description="""Pixel y-coordinate.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + weight: Optional[NDArray[Shape["*"], float]] = Field( + None, + description="""Weight of the pixel.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ @@ -544,10 +556,26 @@ class PlaneSegmentationVoxelMask(VectorData): "linkml_meta": {"equals_string": "voxel_mask", "ifabsent": "string(voxel_mask)"} }, ) - x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") - y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") - z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") - weight: Optional[float] = Field(None, description="""Weight of the voxel.""") + x: Optional[NDArray[Shape["*"], int]] = Field( + None, + description="""Voxel x-coordinate.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + y: Optional[NDArray[Shape["*"], int]] = Field( + None, + description="""Voxel y-coordinate.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + z: Optional[NDArray[Shape["*"], int]] = Field( + None, + description="""Voxel z-coordinate.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + weight: Optional[NDArray[Shape["*"], float]] = Field( + None, + description="""Weight of the voxel.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) description: str = Field(..., description="""Description of what these vectors represent.""") value: Optional[ Union[ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index 0c982dd..a37ac18 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ 
b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -414,7 +414,7 @@ class DynamicTableMixin(BaseModel): return model @model_validator(mode="after") - def cast_extra_columns(self): + def cast_extra_columns(self) -> "DynamicTableMixin": """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 9a2696f..2a0c39a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -414,7 +414,7 @@ class DynamicTableMixin(BaseModel): return model @model_validator(mode="after") - def cast_extra_columns(self): + def cast_extra_columns(self) -> "DynamicTableMixin": """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index 35fe280..57472fa 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -414,7 +414,7 @@ class DynamicTableMixin(BaseModel): return model @model_validator(mode="after") - def cast_extra_columns(self): + def cast_extra_columns(self) -> "DynamicTableMixin": """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py index d12fd85..cbefb40 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -415,7 +415,7 @@ class DynamicTableMixin(BaseModel): return model @model_validator(mode="after") - def cast_extra_columns(self): + def cast_extra_columns(self) -> "DynamicTableMixin": """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py index 1bfb82c..c62bd06 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -415,7 +415,7 @@ class DynamicTableMixin(BaseModel): return model @model_validator(mode="after") - def cast_extra_columns(self): + def cast_extra_columns(self) -> "DynamicTableMixin": """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
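The `cast_extra_columns` hunks above and below change only the return annotation; for context, a minimal sketch of what such an `after`-mode validator does — illustrative names and a stand-in `VectorData`, not the generated mixin:

```python
# Minimal sketch, assuming a pydantic v2 model with extra="allow".
# `VectorData` and `DynamicTableMixin` here are stand-ins, not the
# generated classes from this patch.
from typing import Any, List

from pydantic import BaseModel, ConfigDict, model_validator


class VectorData(BaseModel):
    name: str
    description: str = ""
    value: List[Any] = []


class DynamicTableMixin(BaseModel):
    model_config = ConfigDict(extra="allow")
    name: str

    @model_validator(mode="after")
    def cast_extra_columns(self) -> "DynamicTableMixin":
        # Cast bare lists passed as extra columns to VectorData, so that
        # VectorData/VectorIndex target resolution sees uniform columns.
        for key, val in (self.__pydantic_extra__ or {}).items():
            if isinstance(val, list):
                self.__pydantic_extra__[key] = VectorData(name=key, value=val)
        return self


table = DynamicTableMixin(name="trials", my_col=[1, 2, 3])
assert isinstance(table.my_col, VectorData)
```

The explicit `-> "DynamicTableMixin"` annotation matters for `after`-mode validators, which receive and must return the model instance; without it, type checkers infer `None` and flag downstream use of the validated model.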
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py index 00eaee4..57a1938 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py @@ -8,6 +8,7 @@ from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np from ...hdmf_common.v1_3_0.hdmf_common_base import Container, Data +from numpydantic import NDArray, Shape metamodel_version = "None" version = "1.3.0" @@ -45,6 +46,7 @@ class LinkMLMeta(RootModel): return key in self.root +NUMPYDANTIC_VERSION = "1.2.1" linkml_meta = LinkMLMeta( { "annotations": { @@ -96,9 +98,10 @@ class ExternalResourcesKeys(Data): "keys", json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, ) - key_name: str = Field( + key_name: NDArray[Shape["*"], str] = Field( ..., description="""The user term that maps to one or more resources in the 'resources' table.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -115,17 +118,25 @@ class ExternalResourcesResources(Data): "linkml_meta": {"equals_string": "resources", "ifabsent": "string(resources)"} }, ) - keytable_idx: int = Field(..., description="""The index to the key in the 'keys' table.""") - resource_name: str = Field( + keytable_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The index to the key in the 'keys' table.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + resource_name: NDArray[Shape["*"], str] = Field( ..., description="""The name of the online resource (e.g., website, database) that has the entity.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - resource_id: str = Field( - ..., description="""The unique identifier for the resource entity at the resource.""" + resource_id: NDArray[Shape["*"], str] = Field( + ..., + description="""The unique identifier for the resource entity at the resource.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - uri: str = Field( + uri: NDArray[Shape["*"], str] = Field( ..., description="""The URI for the resource entity this reference applies to. This can be an empty string.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -142,10 +153,15 @@ class ExternalResourcesObjects(Data): "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} }, ) - object_id: str = Field(..., description="""The UUID for the object.""") - field: str = Field( + object_id: NDArray[Shape["*"], str] = Field( + ..., + description="""The UUID for the object.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + field: NDArray[Shape["*"], str] = Field( ..., description="""The field of the object. 
This can be an empty string if the object is a dataset and the field is the dataset values.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -162,10 +178,16 @@ class ExternalResourcesObjectKeys(Data): "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} }, ) - objecttable_idx: int = Field( - ..., description="""The index to the 'objects' table for the object that holds the key.""" + objecttable_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The index to the 'objects' table for the object that holds the key.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + keytable_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The index to the 'keys' table for the key.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - keytable_idx: int = Field(..., description="""The index to the 'keys' table for the key.""") # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py index b19b63a..c0e6522 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -415,7 +415,7 @@ class DynamicTableMixin(BaseModel): return model @model_validator(mode="after") - def cast_extra_columns(self): + def cast_extra_columns(self) -> "DynamicTableMixin": """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index b3d2f89..d3599bb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -415,7 +415,7 @@ class DynamicTableMixin(BaseModel): return model @model_validator(mode="after") - def cast_extra_columns(self): + def cast_extra_columns(self) -> "DynamicTableMixin": """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 53e9996..946d90f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -415,7 +415,7 @@ class DynamicTableMixin(BaseModel): return model @model_validator(mode="after") - def cast_extra_columns(self): + def cast_extra_columns(self) -> "DynamicTableMixin": """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
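The scalar-to-array conversions in these models rely on numpydantic's `NDArray[Shape["*"], ...]` annotation, paired with the `exact_number_dimensions: 1` array constraint. A quick sketch of the validation behavior this buys — the `Keys` model is illustrative, not one of the generated classes:

```python
# Minimal sketch, assuming numpydantic >= 1.2.1 as pinned in the patch.
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, Field


class Keys(BaseModel):
    key: NDArray[Shape["*"], str] = Field(
        ...,
        description="The user term that maps to one or more resources.",
        json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}},
    )


Keys(key=np.array(["human", "mouse"]))   # valid: one dimension, any length
# Keys(key=np.array("human"))            # rejected: zero-dimensional
# Keys(key=np.ones((2, 2), dtype=str))   # rejected: two dimensions
```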
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 641f20c..95d594e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -415,7 +415,7 @@ class DynamicTableMixin(BaseModel): return model @model_validator(mode="after") - def cast_extra_columns(self): + def cast_extra_columns(self) -> "DynamicTableMixin": """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index 45e4269..ea5d7b1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -415,7 +415,7 @@ class DynamicTableMixin(BaseModel): return model @model_validator(mode="after") - def cast_extra_columns(self): + def cast_extra_columns(self) -> "DynamicTableMixin": """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index 0ca3130..3c4c993 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -415,7 +415,7 @@ class DynamicTableMixin(BaseModel): return model @model_validator(mode="after") - def cast_extra_columns(self): + def cast_extra_columns(self) -> "DynamicTableMixin": """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index d6e1081..60bbadf 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -415,7 +415,7 @@ class DynamicTableMixin(BaseModel): return model @model_validator(mode="after") - def cast_extra_columns(self): + def cast_extra_columns(self) -> "DynamicTableMixin": """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
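The hdmf-experimental resources tables below get the same treatment. Because each of these classes is a table whose fields are parallel 1-D columns, the array typing also makes row alignment checkable; a sketch of that invariant, with a hypothetical validator that is not part of the generated models:

```python
# Minimal sketch: a hypothetical row-alignment check enabled by the 1-D
# column typing. `ObjectKeys` and `columns_aligned` are illustrative only.
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, model_validator


class ObjectKeys(BaseModel):
    objects_idx: NDArray[Shape["*"], int]
    keys_idx: NDArray[Shape["*"], int]

    @model_validator(mode="after")
    def columns_aligned(self) -> "ObjectKeys":
        # One entry per row: parallel columns must be the same length.
        assert len(self.objects_idx) == len(self.keys_idx), (
            "objects_idx and keys_idx must be the same length"
        )
        return self


ObjectKeys(objects_idx=np.array([0, 1, 1]), keys_idx=np.array([0, 0, 2]))
```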
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py index 6b84d69..6b1a964 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py @@ -8,6 +8,7 @@ from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np from ...hdmf_common.v1_4_0.hdmf_common_base import Container, Data +from numpydantic import NDArray, Shape metamodel_version = "None" version = "0.1.0" @@ -45,6 +46,7 @@ class LinkMLMeta(RootModel): return key in self.root +NUMPYDANTIC_VERSION = "1.2.1" linkml_meta = LinkMLMeta( { "annotations": { @@ -99,9 +101,10 @@ class ExternalResourcesKeys(Data): "keys", json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, ) - key: str = Field( + key: NDArray[Shape["*"], str] = Field( ..., description="""The user term that maps to one or more resources in the 'resources' table.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -118,12 +121,25 @@ class ExternalResourcesEntities(Data): "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"} }, ) - keys_idx: int = Field(..., description="""The index to the key in the 'keys' table.""") - resources_idx: int = Field(..., description="""The index into the 'resources' table""") - entity_id: str = Field(..., description="""The unique identifier entity.""") - entity_uri: str = Field( + keys_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The index to the key in the 'keys' table.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + resources_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The index into the 'resources' table""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + entity_id: NDArray[Shape["*"], str] = Field( + ..., + description="""The unique identifier entity.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + entity_uri: NDArray[Shape["*"], str] = Field( ..., description="""The URI for the entity this reference applies to. This can be an empty string.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -140,9 +156,15 @@ class ExternalResourcesResources(Data): "linkml_meta": {"equals_string": "resources", "ifabsent": "string(resources)"} }, ) - resource: str = Field(..., description="""The name of the resource.""") - resource_uri: str = Field( - ..., description="""The URI for the resource. This can be an empty string.""" + resource: NDArray[Shape["*"], str] = Field( + ..., + description="""The name of the resource.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + resource_uri: NDArray[Shape["*"], str] = Field( + ..., + description="""The URI for the resource. 
This can be an empty string.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -159,10 +181,15 @@ class ExternalResourcesObjects(Data): "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} }, ) - object_id: str = Field(..., description="""The UUID for the object.""") - field: str = Field( + object_id: NDArray[Shape["*"], str] = Field( + ..., + description="""The UUID for the object.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + field: NDArray[Shape["*"], str] = Field( ..., description="""The field of the object. This can be an empty string if the object is a dataset and the field is the dataset values.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -179,10 +206,16 @@ class ExternalResourcesObjectKeys(Data): "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} }, ) - objects_idx: int = Field( - ..., description="""The index to the 'objects' table for the object that holds the key.""" + objects_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The index to the 'objects' table for the object that holds the key.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + keys_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The index to the 'keys' table for the key.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - keys_idx: int = Field(..., description="""The index to the 'keys' table for the key.""") # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py index a9ea6d5..810f8ef 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py @@ -8,6 +8,7 @@ from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np from ...hdmf_common.v1_5_1.hdmf_common_base import Container, Data +from numpydantic import NDArray, Shape metamodel_version = "None" version = "0.2.0" @@ -45,6 +46,7 @@ class LinkMLMeta(RootModel): return key in self.root +NUMPYDANTIC_VERSION = "1.2.1" linkml_meta = LinkMLMeta( { "annotations": { @@ -99,9 +101,10 @@ class ExternalResourcesKeys(Data): "keys", json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, ) - key: str = Field( + key: NDArray[Shape["*"], str] = Field( ..., description="""The user term that maps to one or more resources in the 'resources' table.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -118,12 +121,25 @@ class ExternalResourcesEntities(Data): "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"} }, ) - keys_idx: int = Field(..., description="""The index to the key in the 'keys' table.""") - resources_idx: int = Field(..., description="""The index into the 'resources' table""") - entity_id: str = Field(..., description="""The unique identifier entity.""") - entity_uri: str = Field( + keys_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The index to the key in the 'keys' table.""", + json_schema_extra={"linkml_meta": {"array": 
{"exact_number_dimensions": 1}}}, + ) + resources_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The index into the 'resources' table""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + entity_id: NDArray[Shape["*"], str] = Field( + ..., + description="""The unique identifier entity.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + entity_uri: NDArray[Shape["*"], str] = Field( ..., description="""The URI for the entity this reference applies to. This can be an empty string.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -140,9 +156,15 @@ class ExternalResourcesResources(Data): "linkml_meta": {"equals_string": "resources", "ifabsent": "string(resources)"} }, ) - resource: str = Field(..., description="""The name of the resource.""") - resource_uri: str = Field( - ..., description="""The URI for the resource. This can be an empty string.""" + resource: NDArray[Shape["*"], str] = Field( + ..., + description="""The name of the resource.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + resource_uri: NDArray[Shape["*"], str] = Field( + ..., + description="""The URI for the resource. This can be an empty string.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -159,14 +181,20 @@ class ExternalResourcesObjects(Data): "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} }, ) - object_id: str = Field(..., description="""The UUID for the object.""") - relative_path: str = Field( + object_id: NDArray[Shape["*"], str] = Field( + ..., + description="""The UUID for the object.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + relative_path: NDArray[Shape["*"], str] = Field( ..., description="""The relative path from the container with the object_id to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the container is a dataset which contains the value(s) that is associated with an external resource.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - field: str = Field( + field: NDArray[Shape["*"], str] = Field( ..., description="""The field of the compound data type using an external resource. 
This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -183,10 +211,16 @@ class ExternalResourcesObjectKeys(Data): "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} }, ) - objects_idx: int = Field( - ..., description="""The index to the 'objects' table for the object that holds the key.""" + objects_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The index to the 'objects' table for the object that holds the key.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + keys_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The index to the 'keys' table for the key.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - keys_idx: int = Field(..., description="""The index to the 'keys' table for the key.""") # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py index c2fec76..4404c7f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py @@ -8,6 +8,7 @@ from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np from ...hdmf_common.v1_6_0.hdmf_common_base import Container, Data +from numpydantic import NDArray, Shape metamodel_version = "None" version = "0.3.0" @@ -45,6 +46,7 @@ class LinkMLMeta(RootModel): return key in self.root +NUMPYDANTIC_VERSION = "1.2.1" linkml_meta = LinkMLMeta( { "annotations": { @@ -99,9 +101,10 @@ class ExternalResourcesKeys(Data): "keys", json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, ) - key: str = Field( + key: NDArray[Shape["*"], str] = Field( ..., description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -116,9 +119,10 @@ class ExternalResourcesFiles(Data): "files", json_schema_extra={"linkml_meta": {"equals_string": "files", "ifabsent": "string(files)"}}, ) - file_object_id: str = Field( + file_object_id: NDArray[Shape["*"], str] = Field( ..., description="""The object id (UUID) of a file that contains objects that refers to external resources.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -135,14 +139,20 @@ class ExternalResourcesEntities(Data): "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"} }, ) - keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""") - entity_id: str = Field( + keys_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The row index to the key in the `keys` table.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + entity_id: NDArray[Shape["*"], str] = Field( ..., description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - 
entity_uri: str = Field( + entity_uri: NDArray[Shape["*"], str] = Field( ..., description="""The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -159,18 +169,30 @@ class ExternalResourcesObjects(Data): "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} }, ) - files_idx: int = Field( - ..., description="""The row index to the file in the `files` table containing the object.""" + files_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The row index to the file in the `files` table containing the object.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - object_id: str = Field(..., description="""The object id (UUID) of the object.""") - object_type: str = Field(..., description="""The data type of the object.""") - relative_path: str = Field( + object_id: NDArray[Shape["*"], str] = Field( + ..., + description="""The object id (UUID) of the object.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + object_type: NDArray[Shape["*"], str] = Field( + ..., + description="""The data type of the object.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + relative_path: NDArray[Shape["*"], str] = Field( ..., description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - field: str = Field( + field: NDArray[Shape["*"], str] = Field( ..., description="""The field within the compound data type using an external resource. 
This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -187,10 +209,16 @@ class ExternalResourcesObjectKeys(Data): "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} }, ) - objects_idx: int = Field( - ..., description="""The row index to the object in the `objects` table that holds the key""" + objects_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The row index to the object in the `objects` table that holds the key""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + keys_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The row index to the key in the `keys` table.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""") # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py index f0bb510..7eef961 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py @@ -8,6 +8,7 @@ from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np from ...hdmf_common.v1_7_0.hdmf_common_base import Container, Data +from numpydantic import NDArray, Shape metamodel_version = "None" version = "0.4.0" @@ -45,6 +46,7 @@ class LinkMLMeta(RootModel): return key in self.root +NUMPYDANTIC_VERSION = "1.2.1" linkml_meta = LinkMLMeta( { "annotations": { @@ -102,9 +104,10 @@ class ExternalResourcesKeys(Data): "keys", json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, ) - key: str = Field( + key: NDArray[Shape["*"], str] = Field( ..., description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -119,9 +122,10 @@ class ExternalResourcesFiles(Data): "files", json_schema_extra={"linkml_meta": {"equals_string": "files", "ifabsent": "string(files)"}}, ) - file_object_id: str = Field( + file_object_id: NDArray[Shape["*"], str] = Field( ..., description="""The object id (UUID) of a file that contains objects that refers to external resources.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -138,13 +142,15 @@ class ExternalResourcesEntities(Data): "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"} }, ) - entity_id: str = Field( + entity_id: NDArray[Shape["*"], str] = Field( ..., description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - entity_uri: str = Field( + entity_uri: NDArray[Shape["*"], str] = Field( ..., description="""The URI for the entity this reference applies to. This can be an empty string. 
e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -161,18 +167,30 @@ class ExternalResourcesObjects(Data): "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} }, ) - files_idx: int = Field( - ..., description="""The row index to the file in the `files` table containing the object.""" + files_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The row index to the file in the `files` table containing the object.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - object_id: str = Field(..., description="""The object id (UUID) of the object.""") - object_type: str = Field(..., description="""The data type of the object.""") - relative_path: str = Field( + object_id: NDArray[Shape["*"], str] = Field( + ..., + description="""The object id (UUID) of the object.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + object_type: NDArray[Shape["*"], str] = Field( + ..., + description="""The data type of the object.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + relative_path: NDArray[Shape["*"], str] = Field( ..., description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - field: str = Field( + field: NDArray[Shape["*"], str] = Field( ..., description="""The field within the compound data type using an external resource. 
This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -189,10 +207,16 @@ class ExternalResourcesObjectKeys(Data): "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} }, ) - objects_idx: int = Field( - ..., description="""The row index to the object in the `objects` table that holds the key""" + objects_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The row index to the object in the `objects` table that holds the key""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + keys_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The row index to the key in the `keys` table.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""") class ExternalResourcesEntityKeys(Data): @@ -208,10 +232,16 @@ class ExternalResourcesEntityKeys(Data): "linkml_meta": {"equals_string": "entity_keys", "ifabsent": "string(entity_keys)"} }, ) - entities_idx: int = Field( - ..., description="""The row index to the entity in the `entities` table.""" + entities_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The row index to the entity in the `entities` table.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + keys_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The row index to the key in the `keys` table.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""") # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py index 8c33e35..0adfcc3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py @@ -8,6 +8,7 @@ from typing import Any, ClassVar, List, Literal, Dict, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np from ...hdmf_common.v1_8_0.hdmf_common_base import Container, Data +from numpydantic import NDArray, Shape metamodel_version = "None" version = "0.5.0" @@ -45,6 +46,7 @@ class LinkMLMeta(RootModel): return key in self.root +NUMPYDANTIC_VERSION = "1.2.1" linkml_meta = LinkMLMeta( { "annotations": { @@ -102,9 +104,10 @@ class HERDKeys(Data): "keys", json_schema_extra={"linkml_meta": {"equals_string": "keys", "ifabsent": "string(keys)"}}, ) - key: str = Field( + key: NDArray[Shape["*"], str] = Field( ..., description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -119,9 +122,10 @@ class HERDFiles(Data): "files", json_schema_extra={"linkml_meta": {"equals_string": "files", "ifabsent": "string(files)"}}, ) - file_object_id: str = Field( + file_object_id: NDArray[Shape["*"], str] = Field( ..., description="""The object id (UUID) of a file that contains objects that refers to external resources.""", + json_schema_extra={"linkml_meta": {"array": 
{"exact_number_dimensions": 1}}}, ) @@ -138,13 +142,15 @@ class HERDEntities(Data): "linkml_meta": {"equals_string": "entities", "ifabsent": "string(entities)"} }, ) - entity_id: str = Field( + entity_id: NDArray[Shape["*"], str] = Field( ..., description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - entity_uri: str = Field( + entity_uri: NDArray[Shape["*"], str] = Field( ..., description="""The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -161,18 +167,30 @@ class HERDObjects(Data): "linkml_meta": {"equals_string": "objects", "ifabsent": "string(objects)"} }, ) - files_idx: int = Field( - ..., description="""The row index to the file in the `files` table containing the object.""" + files_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The row index to the file in the `files` table containing the object.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - object_id: str = Field(..., description="""The object id (UUID) of the object.""") - object_type: str = Field(..., description="""The data type of the object.""") - relative_path: str = Field( + object_id: NDArray[Shape["*"], str] = Field( + ..., + description="""The object id (UUID) of the object.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + object_type: NDArray[Shape["*"], str] = Field( + ..., + description="""The data type of the object.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + relative_path: NDArray[Shape["*"], str] = Field( ..., description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - field: str = Field( + field: NDArray[Shape["*"], str] = Field( ..., description="""The field within the compound data type using an external resource. 
This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) @@ -189,10 +207,16 @@ class HERDObjectKeys(Data): "linkml_meta": {"equals_string": "object_keys", "ifabsent": "string(object_keys)"} }, ) - objects_idx: int = Field( - ..., description="""The row index to the object in the `objects` table that holds the key""" + objects_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The row index to the object in the `objects` table that holds the key""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + keys_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The row index to the key in the `keys` table.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""") class HERDEntityKeys(Data): @@ -208,10 +232,16 @@ class HERDEntityKeys(Data): "linkml_meta": {"equals_string": "entity_keys", "ifabsent": "string(entity_keys)"} }, ) - entities_idx: int = Field( - ..., description="""The row index to the entity in the `entities` table.""" + entities_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The row index to the entity in the `entities` table.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, + ) + keys_idx: NDArray[Shape["*"], int] = Field( + ..., + description="""The row index to the key in the `keys` table.""", + json_schema_extra={"linkml_meta": {"array": {"exact_number_dimensions": 1}}}, ) - keys_idx: int = Field(..., description="""The row index to the key in the `keys` table.""") # Model rebuild diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml index 7d61f52..52f4c5a 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.ecephys.yaml @@ -334,18 +334,24 @@ classes: x: name: x description: x coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false y: name: y description: y coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false z: name: z description: z coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.epoch.yaml index 924f069..2e5ca8e 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_0/core.nwb.epoch.yaml @@ -100,6 +100,8 @@ classes: description: Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time. + array: + exact_number_dimensions: 1 range: int32 required: false multivalued: false @@ -107,12 +109,16 @@ classes: name: count description: Number of data samples available in this time series, during this epoch. + array: + exact_number_dimensions: 1 range: int32 required: false multivalued: false timeseries: name: timeseries description: the TimeSeries that this index applies to. 
+ array: + exact_number_dimensions: 1 range: TimeSeries required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml index d5aaa6e..2244ea6 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.ecephys.yaml @@ -334,18 +334,24 @@ classes: x: name: x description: x coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false y: name: y description: y coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false z: name: z description: z coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.epoch.yaml index 1f086dc..e7e66c5 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_1/core.nwb.epoch.yaml @@ -100,6 +100,8 @@ classes: description: Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time. + array: + exact_number_dimensions: 1 range: int32 required: false multivalued: false @@ -107,12 +109,16 @@ classes: name: count description: Number of data samples available in this time series, during this epoch. + array: + exact_number_dimensions: 1 range: int32 required: false multivalued: false timeseries: name: timeseries description: the TimeSeries that this index applies to. + array: + exact_number_dimensions: 1 range: TimeSeries required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml index 6641150..70f1c6c 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.ecephys.yaml @@ -334,18 +334,24 @@ classes: x: name: x description: x coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false y: name: y description: y coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false z: name: z description: z coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.epoch.yaml index 18c60a6..6121bb5 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_2/core.nwb.epoch.yaml @@ -100,6 +100,8 @@ classes: description: Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time. + array: + exact_number_dimensions: 1 range: int32 required: false multivalued: false @@ -107,12 +109,16 @@ classes: name: count description: Number of data samples available in this time series, during this epoch. + array: + exact_number_dimensions: 1 range: int32 required: false multivalued: false timeseries: name: timeseries description: the TimeSeries that this index applies to. 
+ array: + exact_number_dimensions: 1 range: TimeSeries required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml index 8acdc93..166af8a 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ecephys.yaml @@ -334,18 +334,24 @@ classes: x: name: x description: x coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false y: name: y description: y coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false z: name: z description: z coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.epoch.yaml index 3365825..68a7004 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.epoch.yaml @@ -100,6 +100,8 @@ classes: description: Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time. + array: + exact_number_dimensions: 1 range: int32 required: false multivalued: false @@ -107,12 +109,16 @@ classes: name: count description: Number of data samples available in this time series, during this epoch. + array: + exact_number_dimensions: 1 range: int32 required: false multivalued: false timeseries: name: timeseries description: the TimeSeries that this index applies to. + array: + exact_number_dimensions: 1 range: TimeSeries required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml index 73f7c6f..4d7568e 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_4/core.nwb.ophys.yaml @@ -258,18 +258,24 @@ classes: x: name: x description: Pixel x-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false y: name: y description: Pixel y-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false weight: name: weight description: Weight of the pixel. + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false @@ -289,24 +295,32 @@ classes: x: name: x description: Voxel x-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false y: name: y description: Voxel y-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false z: name: z description: Voxel z-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false weight: name: weight description: Weight of the voxel. 
+ array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml index 0e11ce2..d7e2d98 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ecephys.yaml @@ -334,18 +334,24 @@ classes: x: name: x description: x coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false y: name: y description: y coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false z: name: z description: z coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.epoch.yaml index 9a3bedf..2b4fbde 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.epoch.yaml @@ -100,6 +100,8 @@ classes: description: Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time. + array: + exact_number_dimensions: 1 range: int32 required: false multivalued: false @@ -107,12 +109,16 @@ classes: name: count description: Number of data samples available in this time series, during this epoch. + array: + exact_number_dimensions: 1 range: int32 required: false multivalued: false timeseries: name: timeseries description: the TimeSeries that this index applies to. + array: + exact_number_dimensions: 1 range: TimeSeries required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml index 6613637..2424cb5 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_2_5/core.nwb.ophys.yaml @@ -258,18 +258,24 @@ classes: x: name: x description: Pixel x-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false y: name: y description: Pixel y-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false weight: name: weight description: Weight of the pixel. + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false @@ -289,24 +295,32 @@ classes: x: name: x description: Voxel x-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false y: name: y description: Voxel y-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false z: name: z description: Voxel z-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false weight: name: weight description: Weight of the voxel. 
+ array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml index 12ad322..2863bba 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ecephys.yaml @@ -345,18 +345,24 @@ classes: x: name: x description: x coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false y: name: y description: y coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false z: name: z description: z coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml index 6616781..ce14120 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.epoch.yaml @@ -100,6 +100,8 @@ classes: description: Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time. + array: + exact_number_dimensions: 1 range: int32 required: false multivalued: false @@ -107,12 +109,16 @@ classes: name: count description: Number of data samples available in this time series, during this epoch. + array: + exact_number_dimensions: 1 range: int32 required: false multivalued: false timeseries: name: timeseries description: the TimeSeries that this index applies to. + array: + exact_number_dimensions: 1 range: TimeSeries required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml index 9e86441..b208d50 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_3_0/core.nwb.ophys.yaml @@ -258,18 +258,24 @@ classes: x: name: x description: Pixel x-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false y: name: y description: Pixel y-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false weight: name: weight description: Weight of the pixel. + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false @@ -289,24 +295,32 @@ classes: x: name: x description: Voxel x-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false y: name: y description: Voxel y-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false z: name: z description: Voxel z-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false weight: name: weight description: Weight of the voxel. 
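The hunk above repeats verbatim across every pinned schema version (v2_2_0 through v2_7_0): per-row columns that were plain scalars gain `array: exact_number_dimensions: 1`, which the pydantic generator renders as a 1-D `NDArray` annotation, mirroring the HERD fields at the top of this patch. A minimal sketch of the resulting validation behavior, using an illustrative (not generated) model name:

```python
from typing import Optional

import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel, Field


class VoxelMaskColumns(BaseModel):
    """Illustrative stand-in for a generated per-row column class."""

    # `exact_number_dimensions: 1` becomes a one-dimensional NDArray annotation
    x: Optional[NDArray[Shape["*"], int]] = Field(None, description="Voxel x-coordinate.")
    weight: Optional[NDArray[Shape["*"], float]] = Field(None, description="Weight of the voxel.")


mask = VoxelMaskColumns(x=np.array([1, 2, 3]), weight=np.array([0.5, 0.25, 0.25]))
# a bare scalar or a 2-D array would now fail validation, since the spec
# pins exactly one dimension
```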
+ array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml index 8ba1ca7..1d817de 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.base.yaml @@ -41,6 +41,8 @@ classes: description: Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time. + array: + exact_number_dimensions: 1 range: int32 required: true multivalued: false @@ -48,12 +50,16 @@ classes: name: count description: Number of data samples available in this time series, during this epoch + array: + exact_number_dimensions: 1 range: int32 required: true multivalued: false timeseries: name: timeseries description: The TimeSeries that this index applies to + array: + exact_number_dimensions: 1 range: TimeSeries required: true multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml index e572bd1..dd93758 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ecephys.yaml @@ -345,18 +345,24 @@ classes: x: name: x description: x coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false y: name: y description: y coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false z: name: z description: z coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml index 512513e..c3fb2cb 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.epoch.yaml @@ -100,6 +100,8 @@ classes: description: Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time. + array: + exact_number_dimensions: 1 range: int32 required: false multivalued: false @@ -107,12 +109,16 @@ classes: name: count description: Number of data samples available in this time series, during this epoch. + array: + exact_number_dimensions: 1 range: int32 required: false multivalued: false timeseries: name: timeseries description: the TimeSeries that this index applies to. + array: + exact_number_dimensions: 1 range: TimeSeries required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml index f59ef09..4317684 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_4_0/core.nwb.ophys.yaml @@ -258,18 +258,24 @@ classes: x: name: x description: Pixel x-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false y: name: y description: Pixel y-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false weight: name: weight description: Weight of the pixel. 
+ array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false @@ -289,24 +295,32 @@ classes: x: name: x description: Voxel x-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false y: name: y description: Voxel y-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false z: name: z description: Voxel z-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false weight: name: weight description: Weight of the voxel. + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml index 1a9d3a8..e6679dc 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.base.yaml @@ -41,6 +41,8 @@ classes: description: Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time. + array: + exact_number_dimensions: 1 range: int32 required: true multivalued: false @@ -48,12 +50,16 @@ classes: name: count description: Number of data samples available in this time series, during this epoch + array: + exact_number_dimensions: 1 range: int32 required: true multivalued: false timeseries: name: timeseries description: The TimeSeries that this index applies to + array: + exact_number_dimensions: 1 range: TimeSeries required: true multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml index 1562caf..2efc5c7 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ecephys.yaml @@ -345,18 +345,24 @@ classes: x: name: x description: x coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false y: name: y description: y coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false z: name: z description: z coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml index 2c94d34..e0c051a 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_5_0/core.nwb.ophys.yaml @@ -258,18 +258,24 @@ classes: x: name: x description: Pixel x-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false y: name: y description: Pixel y-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false weight: name: weight description: Weight of the pixel. + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false @@ -289,24 +295,32 @@ classes: x: name: x description: Voxel x-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false y: name: y description: Voxel y-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false z: name: z description: Voxel z-coordinate. 
+ array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false weight: name: weight description: Weight of the voxel. + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml index ad3eeb6..e45d2cb 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.base.yaml @@ -41,6 +41,8 @@ classes: description: Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time. + array: + exact_number_dimensions: 1 range: int32 required: true multivalued: false @@ -48,12 +50,16 @@ classes: name: count description: Number of data samples available in this time series, during this epoch + array: + exact_number_dimensions: 1 range: int32 required: true multivalued: false timeseries: name: timeseries description: The TimeSeries that this index applies to + array: + exact_number_dimensions: 1 range: TimeSeries required: true multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml index 3fab077..5dba82c 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ecephys.yaml @@ -345,18 +345,24 @@ classes: x: name: x description: x coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false y: name: y description: y coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false z: name: z description: z coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml index 00607b0..80a1f6c 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_6_0_alpha/core.nwb.ophys.yaml @@ -311,18 +311,24 @@ classes: x: name: x description: Pixel x-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false y: name: y description: Pixel y-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false weight: name: weight description: Weight of the pixel. + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false @@ -342,24 +348,32 @@ classes: x: name: x description: Voxel x-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false y: name: y description: Voxel y-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false z: name: z description: Voxel z-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false weight: name: weight description: Weight of the voxel. 
+ array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml index ec53dc1..21a57b1 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml @@ -41,6 +41,8 @@ classes: description: Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time. + array: + exact_number_dimensions: 1 range: int32 required: true multivalued: false @@ -48,12 +50,16 @@ classes: name: count description: Number of data samples available in this time series, during this epoch + array: + exact_number_dimensions: 1 range: int32 required: true multivalued: false timeseries: name: timeseries description: The TimeSeries that this index applies to + array: + exact_number_dimensions: 1 range: TimeSeries required: true multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml index 8485a7d..54d0f4f 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml @@ -345,18 +345,24 @@ classes: x: name: x description: x coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false y: name: y description: y coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false z: name: z description: z coordinate + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml index 478cc92..053698d 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml @@ -311,18 +311,24 @@ classes: x: name: x description: Pixel x-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false y: name: y description: Pixel y-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false weight: name: weight description: Weight of the pixel. + array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false @@ -342,24 +348,32 @@ classes: x: name: x description: Voxel x-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false y: name: y description: Voxel y-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false z: name: z description: Voxel z-coordinate. + array: + exact_number_dimensions: 1 range: uint32 required: false multivalued: false weight: name: weight description: Weight of the voxel. 
+ array: + exact_number_dimensions: 1 range: float32 required: false multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml index 918a6a5..3bbb768 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_3_0/hdmf-common.resources.yaml @@ -67,6 +67,8 @@ classes: name: key_name description: The user term that maps to one or more resources in the 'resources' table. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -84,6 +86,8 @@ classes: keytable_idx: name: keytable_idx description: The index to the key in the 'keys' table. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false @@ -91,12 +95,16 @@ classes: name: resource_name description: The name of the online resource (e.g., website, database) that has the entity. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false resource_id: name: resource_id description: The unique identifier for the resource entity at the resource. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -104,6 +112,8 @@ classes: name: uri description: The URI for the resource entity this reference applies to. This can be an empty string. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -122,6 +132,8 @@ classes: object_id: name: object_id description: The UUID for the object. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -129,6 +141,8 @@ classes: name: field description: The field of the object. This can be an empty string if the object is a dataset and the field is the dataset values. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -147,12 +161,16 @@ classes: name: objecttable_idx description: The index to the 'objects' table for the object that holds the key. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false keytable_idx: name: keytable_idx description: The index to the 'keys' table for the key. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml index 89ffc2c..05dc855 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_1_0/hdmf-experimental.resources.yaml @@ -73,6 +73,8 @@ classes: name: key description: The user term that maps to one or more resources in the 'resources' table. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -90,18 +92,24 @@ classes: keys_idx: name: keys_idx description: The index to the key in the 'keys' table. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false resources_idx: name: resources_idx description: The index into the 'resources' table + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false entity_id: name: entity_id description: The unique identifier entity. 
+ array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -109,6 +117,8 @@ classes: name: entity_uri description: The URI for the entity this reference applies to. This can be an empty string. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -126,12 +136,16 @@ classes: resource: name: resource description: The name of the resource. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false resource_uri: name: resource_uri description: The URI for the resource. This can be an empty string. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -150,6 +164,8 @@ classes: object_id: name: object_id description: The UUID for the object. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -157,6 +173,8 @@ classes: name: field description: The field of the object. This can be an empty string if the object is a dataset and the field is the dataset values. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -175,12 +193,16 @@ classes: name: objects_idx description: The index to the 'objects' table for the object that holds the key. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false keys_idx: name: keys_idx description: The index to the 'keys' table for the key. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml index c2fc8d8..a1b6ec0 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_2_0/hdmf-experimental.resources.yaml @@ -73,6 +73,8 @@ classes: name: key description: The user term that maps to one or more resources in the 'resources' table. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -90,18 +92,24 @@ classes: keys_idx: name: keys_idx description: The index to the key in the 'keys' table. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false resources_idx: name: resources_idx description: The index into the 'resources' table + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false entity_id: name: entity_id description: The unique identifier entity. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -109,6 +117,8 @@ classes: name: entity_uri description: The URI for the entity this reference applies to. This can be an empty string. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -126,12 +136,16 @@ classes: resource: name: resource description: The name of the resource. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false resource_uri: name: resource_uri description: The URI for the resource. This can be an empty string. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -150,6 +164,8 @@ classes: object_id: name: object_id description: The UUID for the object. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -159,6 +175,8 @@ classes: dataset or attribute with the value(s) that is associated with an external resource. 
This can be an empty string if the container is a dataset which contains the value(s) that is associated with an external resource. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -167,6 +185,8 @@ classes: description: The field of the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -185,12 +205,16 @@ classes: name: objects_idx description: The index to the 'objects' table for the object that holds the key. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false keys_idx: name: keys_idx description: The index to the 'keys' table for the key. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml index 350ef24..ca25659 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_3_0/hdmf-experimental.resources.yaml @@ -73,6 +73,8 @@ classes: name: key description: The user term that maps to one or more resources in the `resources` table, e.g., "human". + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -91,6 +93,8 @@ classes: name: file_object_id description: The object id (UUID) of a file that contains objects that refers to external resources. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -108,6 +112,8 @@ classes: keys_idx: name: keys_idx description: The row index to the key in the `keys` table. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false @@ -115,6 +121,8 @@ classes: name: entity_id description: The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -122,6 +130,8 @@ classes: name: entity_uri description: The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606 + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -141,18 +151,24 @@ classes: name: files_idx description: The row index to the file in the `files` table containing the object. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false object_id: name: object_id description: The object id (UUID) of the object. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false object_type: name: object_type description: The data type of the object. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -162,6 +178,8 @@ classes: the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -170,6 +188,8 @@ classes: description: The field within the compound data type using an external resource. 
This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -188,12 +208,16 @@ classes: name: objects_idx description: The row index to the object in the `objects` table that holds the key + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false keys_idx: name: keys_idx description: The row index to the key in the `keys` table. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml index 8768e73..e2acf65 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_4_0/hdmf-experimental.resources.yaml @@ -79,6 +79,8 @@ classes: name: key description: The user term that maps to one or more resources in the `resources` table, e.g., "human". + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -97,6 +99,8 @@ classes: name: file_object_id description: The object id (UUID) of a file that contains objects that refers to external resources. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -115,6 +119,8 @@ classes: name: entity_id description: The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -122,6 +128,8 @@ classes: name: entity_uri description: The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606 + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -141,18 +149,24 @@ classes: name: files_idx description: The row index to the file in the `files` table containing the object. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false object_id: name: object_id description: The object id (UUID) of the object. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false object_type: name: object_type description: The data type of the object. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -162,6 +176,8 @@ classes: the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -170,6 +186,8 @@ classes: description: The field within the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -188,12 +206,16 @@ classes: name: objects_idx description: The row index to the object in the `objects` table that holds the key + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false keys_idx: name: keys_idx description: The row index to the key in the `keys` table. 
+ array: + exact_number_dimensions: 1 range: uint required: true multivalued: false @@ -211,12 +233,16 @@ classes: entities_idx: name: entities_idx description: The row index to the entity in the `entities` table. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false keys_idx: name: keys_idx description: The row index to the key in the `keys` table. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml index d4514ed..7478fe1 100644 --- a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml @@ -78,6 +78,8 @@ classes: name: key description: The user term that maps to one or more resources in the `resources` table, e.g., "human". + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -96,6 +98,8 @@ classes: name: file_object_id description: The object id (UUID) of a file that contains objects that refers to external resources. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -114,6 +118,8 @@ classes: name: entity_id description: The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -121,6 +127,8 @@ classes: name: entity_uri description: The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606 + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -140,18 +148,24 @@ classes: name: files_idx description: The row index to the file in the `files` table containing the object. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false object_id: name: object_id description: The object id (UUID) of the object. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false object_type: name: object_type description: The data type of the object. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -161,6 +175,8 @@ classes: the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -169,6 +185,8 @@ classes: description: The field within the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string. + array: + exact_number_dimensions: 1 range: text required: true multivalued: false @@ -187,12 +205,16 @@ classes: name: objects_idx description: The row index to the object in the `objects` table that holds the key + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false keys_idx: name: keys_idx description: The row index to the key in the `keys` table. 
+ array: + exact_number_dimensions: 1 range: uint required: true multivalued: false @@ -210,12 +232,16 @@ classes: entities_idx: name: entities_idx description: The row index to the entity in the `entities` table. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false keys_idx: name: keys_idx description: The row index to the key in the `keys` table. + array: + exact_number_dimensions: 1 range: uint required: true multivalued: false diff --git a/scripts/generate_core.py b/scripts/generate_core.py index af33c37..ea037f9 100644 --- a/scripts/generate_core.py +++ b/scripts/generate_core.py @@ -174,7 +174,7 @@ def generate_versions( # import the most recent version of the schemaz we built latest_version = sorted( - (pydantic_path / "pydantic" / "core").glob('v*'), key=os.path.getmtime + (pydantic_path / "pydantic" / "core").glob("v*"), key=os.path.getmtime )[-1] # make inits to use the schema! we don't usually do this in the From 06a18c23a86bb5a9e1f0745f7ccac4635d384813 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 12 Aug 2024 22:57:00 -0700 Subject: [PATCH 39/61] working aligned dynamic table and TimeSeriesReferenceVectorData --- nwb_linkml/pdm.lock | 104 +++++++-------- nwb_linkml/pyproject.toml | 2 +- nwb_linkml/src/nwb_linkml/adapters/adapter.py | 4 +- .../src/nwb_linkml/generators/pydantic.py | 26 +++- nwb_linkml/src/nwb_linkml/includes/base.py | 14 +++ nwb_linkml/src/nwb_linkml/includes/hdmf.py | 119 ++++++++++++++++++ .../tests/test_adapters/test_adapter.py | 1 + nwb_linkml/tests/test_includes/conftest.py | 58 ++++++++- nwb_linkml/tests/test_includes/test_hdmf.py | 38 ++++++ 9 files changed, 307 insertions(+), 59 deletions(-) create mode 100644 nwb_linkml/src/nwb_linkml/includes/base.py diff --git a/nwb_linkml/pdm.lock b/nwb_linkml/pdm.lock index 51a3fcc..e3af7cb 100644 --- a/nwb_linkml/pdm.lock +++ b/nwb_linkml/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "dev", "plot", "tests"] strategy = ["inherit_metadata"] lock_version = "4.5.0" -content_hash = "sha256:903c3aeebf0fb234263b45213693f0eaee7ac290d22633b1d7a4d5aff51d032b" +content_hash = "sha256:ed633a147948a9923f6b3a99690d5d8bad0b4b8c0d528abe62d132b05d1d9f39" [[metadata.targets]] requires_python = ">=3.10,<3.13" @@ -1036,7 +1036,7 @@ files = [ [[package]] name = "numpydantic" -version = "1.3.0" +version = "1.3.1" requires_python = "<4.0,>=3.9" summary = "Type and shape validation and serialization for numpy arrays in pydantic models" groups = ["default"] @@ -1046,8 +1046,8 @@ dependencies = [ "typing-extensions>=4.11.0; python_version < \"3.11\"", ] files = [ - {file = "numpydantic-1.3.0-py3-none-any.whl", hash = "sha256:bda3aa2cd858e9211006be8b8e589e1905b2c6a2db17cec0c28563ba1ad66b68"}, - {file = "numpydantic-1.3.0.tar.gz", hash = "sha256:b3931d51ba7e22d48bdd2ae56cad368f63db99ef74e8570021a7fd176b2ffc1f"}, + {file = "numpydantic-1.3.1-py3-none-any.whl", hash = "sha256:c0a37c093fcd0e4ed52c4556f4e804eec76fcf924c546e475509e662336f9f61"}, + {file = "numpydantic-1.3.1.tar.gz", hash = "sha256:d61868d7912f2dfee9906bd989399d74f470dee10d5028409c2f5d39529fc4af"}, ] [[package]] @@ -1831,29 +1831,29 @@ files = [ [[package]] name = "ruff" -version = "0.5.6" +version = "0.5.7" requires_python = ">=3.7" summary = "An extremely fast Python linter and code formatter, written in Rust." 
groups = ["dev"] files = [ - {file = "ruff-0.5.6-py3-none-linux_armv6l.whl", hash = "sha256:a0ef5930799a05522985b9cec8290b185952f3fcd86c1772c3bdbd732667fdcd"}, - {file = "ruff-0.5.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b652dc14f6ef5d1552821e006f747802cc32d98d5509349e168f6bf0ee9f8f42"}, - {file = "ruff-0.5.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:80521b88d26a45e871f31e4b88938fd87db7011bb961d8afd2664982dfc3641a"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9bc8f328a9f1309ae80e4d392836e7dbc77303b38ed4a7112699e63d3b066ab"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d394940f61f7720ad371ddedf14722ee1d6250fd8d020f5ea5a86e7be217daf"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111a99cdb02f69ddb2571e2756e017a1496c2c3a2aeefe7b988ddab38b416d36"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e395daba77a79f6dc0d07311f94cc0560375ca20c06f354c7c99af3bf4560c5d"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c476acb43c3c51e3c614a2e878ee1589655fa02dab19fe2db0423a06d6a5b1b6"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2ff8003f5252fd68425fd53d27c1f08b201d7ed714bb31a55c9ac1d4c13e2eb"}, - {file = "ruff-0.5.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c94e084ba3eaa80c2172918c2ca2eb2230c3f15925f4ed8b6297260c6ef179ad"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1f77c1c3aa0669fb230b06fb24ffa3e879391a3ba3f15e3d633a752da5a3e670"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f908148c93c02873210a52cad75a6eda856b2cbb72250370ce3afef6fb99b1ed"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:563a7ae61ad284187d3071d9041c08019975693ff655438d8d4be26e492760bd"}, - {file = "ruff-0.5.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:94fe60869bfbf0521e04fd62b74cbca21cbc5beb67cbb75ab33fe8c174f54414"}, - {file = "ruff-0.5.6-py3-none-win32.whl", hash = "sha256:e6a584c1de6f8591c2570e171cc7ce482bb983d49c70ddf014393cd39e9dfaed"}, - {file = "ruff-0.5.6-py3-none-win_amd64.whl", hash = "sha256:d7fe7dccb1a89dc66785d7aa0ac283b2269712d8ed19c63af908fdccca5ccc1a"}, - {file = "ruff-0.5.6-py3-none-win_arm64.whl", hash = "sha256:57c6c0dd997b31b536bff49b9eee5ed3194d60605a4427f735eeb1f9c1b8d264"}, - {file = "ruff-0.5.6.tar.gz", hash = "sha256:07c9e3c2a8e1fe377dd460371c3462671a728c981c3205a5217291422209f642"}, + {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, + {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, + {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, + {file = 
"ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, + {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, + {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, + {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, + {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, ] [[package]] @@ -2090,44 +2090,44 @@ files = [ [[package]] name = "watchdog" -version = "4.0.1" +version = "4.0.2" requires_python = ">=3.8" summary = "Filesystem events monitoring" groups = ["default"] files = [ - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, - {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"}, - {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, - {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, - {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, - {file = 
"watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, - {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, - {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, - {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, - {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, - {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, + {file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"}, + {file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"}, + {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"}, + {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = 
"sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"}, + {file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"}, + {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"}, + {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"}, + {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"}, + {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"}, ] [[package]] name = "webcolors" -version = "24.6.0" +version = "24.8.0" requires_python = ">=3.8" summary = "A library for working with the color formats defined by HTML and CSS." groups = ["default"] files = [ - {file = "webcolors-24.6.0-py3-none-any.whl", hash = "sha256:8cf5bc7e28defd1d48b9e83d5fc30741328305a8195c29a8e668fa45586568a1"}, - {file = "webcolors-24.6.0.tar.gz", hash = "sha256:1d160d1de46b3e81e58d0a280d0c78b467dc80f47294b91b1ad8029d2cedb55b"}, + {file = "webcolors-24.8.0-py3-none-any.whl", hash = "sha256:fc4c3b59358ada164552084a8ebee637c221e4059267d0f8325b3b560f6c7f0a"}, + {file = "webcolors-24.8.0.tar.gz", hash = "sha256:08b07af286a01bcd30d583a7acadf629583d1f79bfef27dd2c2c5c263817277d"}, ] [[package]] @@ -2187,11 +2187,11 @@ files = [ [[package]] name = "zipp" -version = "3.19.2" +version = "3.20.0" requires_python = ">=3.8" summary = "Backport of pathlib-compatible object wrapper for zip files" groups = ["dev", "plot", "tests"] files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, ] diff --git a/nwb_linkml/pyproject.toml b/nwb_linkml/pyproject.toml index 97e4cce..8119a7f 100644 --- a/nwb_linkml/pyproject.toml +++ b/nwb_linkml/pyproject.toml @@ -20,7 +20,7 @@ dependencies = [ "pydantic-settings>=2.0.3", "tqdm>=4.66.1", 'typing-extensions>=4.12.2;python_version<"3.11"', - "numpydantic>=1.3.0", + "numpydantic>=1.3.1", "black>=24.4.2", "pandas>=2.2.2", ] diff --git a/nwb_linkml/src/nwb_linkml/adapters/adapter.py b/nwb_linkml/src/nwb_linkml/adapters/adapter.py index e09e68f..64f6a47 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/adapter.py +++ b/nwb_linkml/src/nwb_linkml/adapters/adapter.py @@ -153,8 +153,8 @@ class Adapter(BaseModel): # SchemaAdapters that should be located under the same # NamespacesAdapter when it's important to query across SchemaAdapters, # so skip to avoid combinatoric walking - # if key == "imports" and type(input).__name__ == "SchemaAdapter": - # continue + if key == "imports" 
and type(input).__name__ == "SchemaAdapter":
+            continue
         val = getattr(input, key)
         yield (key, val)
         if isinstance(val, (BaseModel, dict, list)):
diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py
index 109ce5b..35ae598 100644
--- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py
+++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py
@@ -26,7 +26,13 @@ from linkml_runtime.utils.compile_python import file_text
 from linkml_runtime.utils.formatutils import remove_empty_items
 from linkml_runtime.utils.schemaview import SchemaView
 
-from nwb_linkml.includes.hdmf import DYNAMIC_TABLE_IMPORTS, DYNAMIC_TABLE_INJECTS
+from nwb_linkml.includes.base import BASEMODEL_GETITEM
+from nwb_linkml.includes.hdmf import (
+    DYNAMIC_TABLE_IMPORTS,
+    DYNAMIC_TABLE_INJECTS,
+    TSRVD_IMPORTS,
+    TSRVD_INJECTS,
+)
 from nwb_linkml.includes.types import ModelTypeString, NamedImports, NamedString, _get_name
 
 OPTIONAL_PATTERN = re.compile(r"Optional\[([\w\.]*)\]")
@@ -44,6 +50,7 @@ class NWBPydanticGenerator(PydanticGenerator):
             ' is stored in an NWB file")'
         ),
         'object_id: Optional[str] = Field(None, description="Unique UUID for each object")',
+        BASEMODEL_GETITEM,
     )
     split: bool = True
     imports: list[Import] = field(default_factory=lambda: [Import(module="numpy", alias="np")])
@@ -232,7 +239,7 @@ class AfterGenerateClass:
         Returns:
 
         """
-        if cls.cls.name == "DynamicTable":
+        if cls.cls.name in "DynamicTable":
             cls.cls.bases = ["DynamicTableMixin"]
 
             if cls.injected_classes is None:
@@ -254,6 +261,21 @@ class AfterGenerateClass:
             cls.cls.bases = ["DynamicTableRegionMixin", "VectorData"]
         elif cls.cls.name == "AlignedDynamicTable":
             cls.cls.bases = ["AlignedDynamicTableMixin", "DynamicTable"]
+        elif cls.cls.name == "TimeSeriesReferenceVectorData":
+            # in core.nwb.base, so need to inject and import again
+            cls.cls.bases = ["TimeSeriesReferenceVectorDataMixin", "VectorData"]
+            if cls.injected_classes is None:
+                cls.injected_classes = TSRVD_INJECTS.copy()
+            else:
+                cls.injected_classes.extend(TSRVD_INJECTS.copy())
+
+            if isinstance(cls.imports, Imports):
+                cls.imports += TSRVD_IMPORTS
+            elif isinstance(cls.imports, list):
+                cls.imports = Imports(imports=cls.imports) + TSRVD_IMPORTS
+            else:
+                cls.imports = TSRVD_IMPORTS.model_copy()
+
         return cls
diff --git a/nwb_linkml/src/nwb_linkml/includes/base.py b/nwb_linkml/src/nwb_linkml/includes/base.py
new file mode 100644
index 0000000..ed69bf3
--- /dev/null
+++ b/nwb_linkml/src/nwb_linkml/includes/base.py
@@ -0,0 +1,14 @@
+"""
+Modifications to the ConfiguredBaseModel used by all generated classes
+"""
+
+BASEMODEL_GETITEM = """
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        \"\"\"Try and get a value from value or "data" if we have it\"\"\"
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+"""
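The `BASEMODEL_GETITEM` block above is injected into every generated
`ConfiguredBaseModel`, so any model that keeps its contents in a `value` or
`data` field can be indexed directly. A minimal sketch of the resulting
behavior, where `ToyData` is a hypothetical stand-in rather than a real
generated class:

```python
from typing import Any, Optional, Union

from pydantic import BaseModel


class ToyData(BaseModel):
    """Hypothetical stand-in for a generated model that stores its contents in `value`."""

    value: Optional[list] = None

    def __getitem__(self, val: Union[int, slice]) -> Any:
        # same passthrough logic as the injected BASEMODEL_GETITEM above
        if self.value is not None:
            return self.value[val]
        raise KeyError("No value or data field to index from")


assert ToyData(value=[1, 2, 3])[0:2] == [1, 2]
```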
diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py
index 77b484c..b4a06d5 100644
--- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py
+++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py
@@ -535,6 +535,109 @@ class AlignedDynamicTableMixin(DynamicTableMixin):
         df.set_index((self.name, "id"), drop=True, inplace=True)
         return df
 
+    @model_validator(mode="before")
+    @classmethod
+    def create_categories(cls, model: Dict[str, Any]) -> Dict:
+        """
+        Construct categories from arguments.
+
+        Model dicts preserve insertion order (guaranteed since Python 3.7), so the
+        order in which keys were passed, minus anything in :attr:`.NON_CATEGORY_FIELDS`,
+        determines the implied category order.
+        """
+        if "categories" not in model:
+            categories = [
+                k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
+            ]
+            model["categories"] = categories
+        else:
+            # add any categories not explicitly given an order at the end
+            categories = [
+                k
+                for k in model
+                if k not in cls.NON_CATEGORY_FIELDS
+                and not k.endswith("_index")
+                and k not in model["categories"]
+            ]
+            model["categories"].extend(categories)
+        return model
+
+
+class TimeSeriesReferenceVectorDataMixin(VectorDataMixin):
+    """
+    Mixin class for TimeSeriesReferenceVectorData -
+    very simple, just indexing the given timeseries object.
+
+    These shouldn't have additional fields in them, just the three columns:
+    idx_start, count, and timeseries
+    """
+
+    idx_start: NDArray[Any, int]
+    count: NDArray[Any, int]
+    timeseries: NDArray[Any, BaseModel]
+
+    @model_validator(mode="after")
+    def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin":
+        assert len(self.idx_start) == len(self.timeseries) == len(self.count), (
+            f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)},"
+            f" timeseries: {len(self.timeseries)}"
+        )
+        return self
+
+    def __len__(self) -> int:
+        """Since we have ensured equal length, just return idx_start"""
+        return len(self.idx_start)
+
+    @overload
+    def _slice_helper(self, item: int) -> slice: ...
+
+    @overload
+    def _slice_helper(self, item: slice) -> List[slice]: ...
+
+    def _slice_helper(self, item: Union[int, slice]) -> Union[slice, List[slice]]:
+        if isinstance(item, (int, np.integer)):
+            return slice(self.idx_start[item], self.idx_start[item] + self.count[item])
+        else:
+            starts = self.idx_start[item]
+            ends = starts + self.count[item]
+            return [slice(start, end) for start, end in zip(starts, ends)]
+
+    def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+        if self._index is not None:
+            raise NotImplementedError(
+                "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+                " never done in the core schema."
+            )
+
+        if isinstance(item, (int, np.integer)):
+            return self.timeseries[self._slice_helper(item)]
+        elif isinstance(item, slice):
+            return [self.timeseries[subitem] for subitem in self._slice_helper(item)]
+        elif isinstance(item, Iterable):
+            return [self.timeseries[self._slice_helper(subitem)] for subitem in item]
+        else:
+            raise ValueError(
+                f"Don't know how to index with {item}, must be an int, slice, or iterable"
+            )
+
+    def __setitem__(self, key: Union[int, slice, Iterable], value: Any) -> None:
+        if self._index is not None:
+            raise NotImplementedError(
+                "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+                " never done in the core schema."
+            )
+        if isinstance(key, (int, np.integer)):
+            self.timeseries[self._slice_helper(key)] = value
+        elif isinstance(key, slice):
+            for subitem in self._slice_helper(key):
+                self.timeseries[subitem] = value
+        elif isinstance(key, Iterable):
+            for subitem in key:
+                self.timeseries[self._slice_helper(subitem)] = value
+        else:
+            raise ValueError(
+                f"Don't know how to index with {key}, must be an int, slice, or iterable"
+            )
+
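+# Illustrative sketch of the indexing semantics above (comment only, not part of
+# the API): with idx_start = [0, 3] and count = [3, 2], a hypothetical instance
+# `refs` resolves integer indices to contiguous spans of `timeseries`:
+#
+#     refs[0]  ->  timeseries[0:3]   # slice(idx_start[0], idx_start[0] + count[0])
+#     refs[1]  ->  timeseries[3:5]
+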
 
 DYNAMIC_TABLE_IMPORTS = Imports(
     imports=[
@@ -577,3 +680,19 @@ DYNAMIC_TABLE_INJECTS = [
     DynamicTableMixin,
     AlignedDynamicTableMixin,
 ]
+
+TSRVD_IMPORTS = Imports(
+    imports=[
+        Import(
+            module="typing",
+            objects=[
+                ObjectImport(name="overload"),
+                ObjectImport(name="Iterable"),
+                ObjectImport(name="Tuple"),
+            ],
+        ),
+        Import(module="pydantic", objects=[ObjectImport(name="model_validator")]),
+    ]
+)
+"""Imports for TimeSeriesReferenceVectorData"""
+TSRVD_INJECTS = [VectorDataMixin, TimeSeriesReferenceVectorDataMixin]
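The test fixtures below build `TimeSeriesReferenceVectorData` columns out of
`idx_start`/`count` pairs. A self-contained sketch of the span arithmetic they
rely on (plain strings stand in for generated `TimeSeries` models; all names
here are illustrative):

```python
import numpy as np

idx_start = np.arange(10)        # row i starts at element i of `timeseries`
count = np.ones(10, dtype=int)   # ...and spans a single element
timeseries = [f"series_{i}" for i in range(10)]  # stand-ins for TimeSeries models

# row 3 of such a reference column resolves to this span of `timeseries`:
row = timeseries[idx_start[3] : idx_start[3] + count[3]]
assert row == ["series_3"]
```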
name="intracellular_stimuli", + stimulus=TimeSeriesReferenceVectorData( + name="stimulus", + description="this should be optional", + idx_start=np.arange(n_recordings), + count=generator.integers(1, 10, (n_recordings,)), + timeseries=stims, + ), ) - responses = IntracellularResponsesTable() - recordings_table = IntracellularRecordingsTable() + responses = IntracellularResponsesTable( + name="intracellular_responses", + response=TimeSeriesReferenceVectorData( + name="response", + description="this should be optional", + idx_start=np.arange(n_recordings), + count=generator.integers(1, 10, (n_recordings,)), + timeseries=responses, + ), + ) + + recordings_table = IntracellularRecordingsTable( + electrodes=electrodes, stimuli=stimuli, responses=responses + ) + return recordings_table diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index b21e51a..f1c0af8 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -1,4 +1,5 @@ import numpy as np +import pandas as pd # FIXME: Make this just be the output of the provider by patching into import machinery from nwb_linkml.models.pydantic.core.v2_7_0.namespace import ( @@ -6,6 +7,7 @@ from nwb_linkml.models.pydantic.core.v2_7_0.namespace import ( DynamicTableRegion, ElectrodeGroup, VectorIndex, + VoltageClampStimulusSeries, ) from .conftest import _ragged_array @@ -159,3 +161,39 @@ def test_dynamictable_extra_coercion(): Extra fields should be coerced to VectorData and have their indexing relationships handled when passed as plain arrays. """ + + +def test_aligned_dynamictable(intracellular_recordings_table): + """ + Multiple aligned dynamictables should be indexable with a multiindex + """ + # can get a single row.. (check correctness below) + row = intracellular_recordings_table[0] + # can get a single table with its name + stimuli = intracellular_recordings_table["stimuli"] + assert stimuli.shape == (10, 1) + + # nab a few rows to make the dataframe + rows = intracellular_recordings_table[0:3] + assert all( + rows.columns + == pd.MultiIndex.from_tuples( + [ + ("electrodes", "index"), + ("electrodes", "electrode"), + ("stimuli", "index"), + ("stimuli", "stimulus"), + ("responses", "index"), + ("responses", "response"), + ] + ) + ) + + # ensure that we get the actual values from the TimeSeriesReferenceVectorData + # also tested separately + # each individual cell should be an array of VoltageClampStimulusSeries... 
+ # and then we should be able to index within that as well + stims = rows["stimuli", "stimulus"][0] + for i in range(len(stims)): + assert isinstance(stims[i], VoltageClampStimulusSeries) + assert all([i == val for val in stims[i][:]]) From 6c07b87ba0df112d68cdbc1a18f840fa15054266 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 12 Aug 2024 22:57:32 -0700 Subject: [PATCH 40/61] regenerate models --- .../pydantic/core/v2_2_0/core_nwb_base.py | 9 + .../pydantic/core/v2_2_0/core_nwb_behavior.py | 9 + .../pydantic/core/v2_2_0/core_nwb_device.py | 9 + .../pydantic/core/v2_2_0/core_nwb_ecephys.py | 9 + .../pydantic/core/v2_2_0/core_nwb_epoch.py | 9 + .../pydantic/core/v2_2_0/core_nwb_file.py | 9 + .../pydantic/core/v2_2_0/core_nwb_icephys.py | 9 + .../pydantic/core/v2_2_0/core_nwb_image.py | 9 + .../pydantic/core/v2_2_0/core_nwb_misc.py | 9 + .../pydantic/core/v2_2_0/core_nwb_ogen.py | 9 + .../pydantic/core/v2_2_0/core_nwb_ophys.py | 9 + .../core/v2_2_0/core_nwb_retinotopy.py | 9 + .../models/pydantic/core/v2_2_0/namespace.py | 9 + .../pydantic/core/v2_2_1/core_nwb_base.py | 9 + .../pydantic/core/v2_2_1/core_nwb_behavior.py | 9 + .../pydantic/core/v2_2_1/core_nwb_device.py | 9 + .../pydantic/core/v2_2_1/core_nwb_ecephys.py | 9 + .../pydantic/core/v2_2_1/core_nwb_epoch.py | 9 + .../pydantic/core/v2_2_1/core_nwb_file.py | 9 + .../pydantic/core/v2_2_1/core_nwb_icephys.py | 9 + .../pydantic/core/v2_2_1/core_nwb_image.py | 9 + .../pydantic/core/v2_2_1/core_nwb_misc.py | 9 + .../pydantic/core/v2_2_1/core_nwb_ogen.py | 9 + .../pydantic/core/v2_2_1/core_nwb_ophys.py | 9 + .../core/v2_2_1/core_nwb_retinotopy.py | 9 + .../models/pydantic/core/v2_2_1/namespace.py | 9 + .../pydantic/core/v2_2_2/core_nwb_base.py | 9 + .../pydantic/core/v2_2_2/core_nwb_behavior.py | 9 + .../pydantic/core/v2_2_2/core_nwb_device.py | 9 + .../pydantic/core/v2_2_2/core_nwb_ecephys.py | 9 + .../pydantic/core/v2_2_2/core_nwb_epoch.py | 9 + .../pydantic/core/v2_2_2/core_nwb_file.py | 9 + .../pydantic/core/v2_2_2/core_nwb_icephys.py | 9 + .../pydantic/core/v2_2_2/core_nwb_image.py | 9 + .../pydantic/core/v2_2_2/core_nwb_misc.py | 9 + .../pydantic/core/v2_2_2/core_nwb_ogen.py | 9 + .../pydantic/core/v2_2_2/core_nwb_ophys.py | 9 + .../core/v2_2_2/core_nwb_retinotopy.py | 9 + .../models/pydantic/core/v2_2_2/namespace.py | 9 + .../pydantic/core/v2_2_4/core_nwb_base.py | 9 + .../pydantic/core/v2_2_4/core_nwb_behavior.py | 9 + .../pydantic/core/v2_2_4/core_nwb_device.py | 9 + .../pydantic/core/v2_2_4/core_nwb_ecephys.py | 9 + .../pydantic/core/v2_2_4/core_nwb_epoch.py | 9 + .../pydantic/core/v2_2_4/core_nwb_file.py | 9 + .../pydantic/core/v2_2_4/core_nwb_icephys.py | 9 + .../pydantic/core/v2_2_4/core_nwb_image.py | 9 + .../pydantic/core/v2_2_4/core_nwb_misc.py | 9 + .../pydantic/core/v2_2_4/core_nwb_ogen.py | 9 + .../pydantic/core/v2_2_4/core_nwb_ophys.py | 9 + .../core/v2_2_4/core_nwb_retinotopy.py | 9 + .../models/pydantic/core/v2_2_4/namespace.py | 9 + .../pydantic/core/v2_2_5/core_nwb_base.py | 9 + .../pydantic/core/v2_2_5/core_nwb_behavior.py | 9 + .../pydantic/core/v2_2_5/core_nwb_device.py | 9 + .../pydantic/core/v2_2_5/core_nwb_ecephys.py | 9 + .../pydantic/core/v2_2_5/core_nwb_epoch.py | 9 + .../pydantic/core/v2_2_5/core_nwb_file.py | 9 + .../pydantic/core/v2_2_5/core_nwb_icephys.py | 9 + .../pydantic/core/v2_2_5/core_nwb_image.py | 9 + .../pydantic/core/v2_2_5/core_nwb_misc.py | 9 + .../pydantic/core/v2_2_5/core_nwb_ogen.py | 9 + .../pydantic/core/v2_2_5/core_nwb_ophys.py | 9 + .../core/v2_2_5/core_nwb_retinotopy.py | 
9 + .../models/pydantic/core/v2_2_5/namespace.py | 9 + .../pydantic/core/v2_3_0/core_nwb_base.py | 9 + .../pydantic/core/v2_3_0/core_nwb_behavior.py | 9 + .../pydantic/core/v2_3_0/core_nwb_device.py | 9 + .../pydantic/core/v2_3_0/core_nwb_ecephys.py | 9 + .../pydantic/core/v2_3_0/core_nwb_epoch.py | 9 + .../pydantic/core/v2_3_0/core_nwb_file.py | 9 + .../pydantic/core/v2_3_0/core_nwb_icephys.py | 9 + .../pydantic/core/v2_3_0/core_nwb_image.py | 9 + .../pydantic/core/v2_3_0/core_nwb_misc.py | 9 + .../pydantic/core/v2_3_0/core_nwb_ogen.py | 9 + .../pydantic/core/v2_3_0/core_nwb_ophys.py | 9 + .../core/v2_3_0/core_nwb_retinotopy.py | 9 + .../models/pydantic/core/v2_3_0/namespace.py | 9 + .../pydantic/core/v2_4_0/core_nwb_base.py | 145 +++++++++++++++- .../pydantic/core/v2_4_0/core_nwb_behavior.py | 9 + .../pydantic/core/v2_4_0/core_nwb_device.py | 9 + .../pydantic/core/v2_4_0/core_nwb_ecephys.py | 9 + .../pydantic/core/v2_4_0/core_nwb_epoch.py | 9 + .../pydantic/core/v2_4_0/core_nwb_file.py | 9 + .../pydantic/core/v2_4_0/core_nwb_icephys.py | 9 + .../pydantic/core/v2_4_0/core_nwb_image.py | 9 + .../pydantic/core/v2_4_0/core_nwb_misc.py | 9 + .../pydantic/core/v2_4_0/core_nwb_ogen.py | 9 + .../pydantic/core/v2_4_0/core_nwb_ophys.py | 9 + .../core/v2_4_0/core_nwb_retinotopy.py | 9 + .../models/pydantic/core/v2_4_0/namespace.py | 9 + .../pydantic/core/v2_5_0/core_nwb_base.py | 157 ++++++++++++++++- .../pydantic/core/v2_5_0/core_nwb_behavior.py | 9 + .../pydantic/core/v2_5_0/core_nwb_device.py | 9 + .../pydantic/core/v2_5_0/core_nwb_ecephys.py | 9 + .../pydantic/core/v2_5_0/core_nwb_epoch.py | 9 + .../pydantic/core/v2_5_0/core_nwb_file.py | 9 + .../pydantic/core/v2_5_0/core_nwb_icephys.py | 9 + .../pydantic/core/v2_5_0/core_nwb_image.py | 9 + .../pydantic/core/v2_5_0/core_nwb_misc.py | 9 + .../pydantic/core/v2_5_0/core_nwb_ogen.py | 9 + .../pydantic/core/v2_5_0/core_nwb_ophys.py | 9 + .../core/v2_5_0/core_nwb_retinotopy.py | 9 + .../models/pydantic/core/v2_5_0/namespace.py | 9 + .../core/v2_6_0_alpha/core_nwb_base.py | 157 ++++++++++++++++- .../core/v2_6_0_alpha/core_nwb_behavior.py | 9 + .../core/v2_6_0_alpha/core_nwb_device.py | 9 + .../core/v2_6_0_alpha/core_nwb_ecephys.py | 9 + .../core/v2_6_0_alpha/core_nwb_epoch.py | 9 + .../core/v2_6_0_alpha/core_nwb_file.py | 9 + .../core/v2_6_0_alpha/core_nwb_icephys.py | 9 + .../core/v2_6_0_alpha/core_nwb_image.py | 9 + .../core/v2_6_0_alpha/core_nwb_misc.py | 9 + .../core/v2_6_0_alpha/core_nwb_ogen.py | 9 + .../core/v2_6_0_alpha/core_nwb_ophys.py | 9 + .../core/v2_6_0_alpha/core_nwb_retinotopy.py | 9 + .../pydantic/core/v2_6_0_alpha/namespace.py | 9 + .../pydantic/core/v2_7_0/core_nwb_base.py | 163 +++++++++++++++++- .../pydantic/core/v2_7_0/core_nwb_behavior.py | 9 + .../pydantic/core/v2_7_0/core_nwb_device.py | 9 + .../pydantic/core/v2_7_0/core_nwb_ecephys.py | 9 + .../pydantic/core/v2_7_0/core_nwb_epoch.py | 9 + .../pydantic/core/v2_7_0/core_nwb_file.py | 9 + .../pydantic/core/v2_7_0/core_nwb_icephys.py | 9 + .../pydantic/core/v2_7_0/core_nwb_image.py | 9 + .../pydantic/core/v2_7_0/core_nwb_misc.py | 9 + .../pydantic/core/v2_7_0/core_nwb_ogen.py | 9 + .../pydantic/core/v2_7_0/core_nwb_ophys.py | 9 + .../core/v2_7_0/core_nwb_retinotopy.py | 9 + .../models/pydantic/core/v2_7_0/namespace.py | 9 + .../hdmf_common/v1_1_0/hdmf_common_sparse.py | 9 + .../hdmf_common/v1_1_0/hdmf_common_table.py | 9 + .../pydantic/hdmf_common/v1_1_0/namespace.py | 9 + .../hdmf_common/v1_1_2/hdmf_common_sparse.py | 9 + .../hdmf_common/v1_1_2/hdmf_common_table.py | 9 + 
.../pydantic/hdmf_common/v1_1_2/namespace.py | 9 + .../hdmf_common/v1_1_3/hdmf_common_sparse.py | 9 + .../hdmf_common/v1_1_3/hdmf_common_table.py | 9 + .../pydantic/hdmf_common/v1_1_3/namespace.py | 9 + .../hdmf_common/v1_2_0/hdmf_common_base.py | 9 + .../hdmf_common/v1_2_0/hdmf_common_sparse.py | 9 + .../hdmf_common/v1_2_0/hdmf_common_table.py | 9 + .../pydantic/hdmf_common/v1_2_0/namespace.py | 9 + .../hdmf_common/v1_2_1/hdmf_common_base.py | 9 + .../hdmf_common/v1_2_1/hdmf_common_sparse.py | 9 + .../hdmf_common/v1_2_1/hdmf_common_table.py | 9 + .../pydantic/hdmf_common/v1_2_1/namespace.py | 9 + .../hdmf_common/v1_3_0/hdmf_common_base.py | 9 + .../v1_3_0/hdmf_common_resources.py | 9 + .../hdmf_common/v1_3_0/hdmf_common_sparse.py | 9 + .../hdmf_common/v1_3_0/hdmf_common_table.py | 9 + .../pydantic/hdmf_common/v1_3_0/namespace.py | 9 + .../hdmf_common/v1_4_0/hdmf_common_base.py | 9 + .../hdmf_common/v1_4_0/hdmf_common_sparse.py | 9 + .../hdmf_common/v1_4_0/hdmf_common_table.py | 9 + .../pydantic/hdmf_common/v1_4_0/namespace.py | 9 + .../hdmf_common/v1_5_0/hdmf_common_base.py | 9 + .../hdmf_common/v1_5_0/hdmf_common_sparse.py | 9 + .../hdmf_common/v1_5_0/hdmf_common_table.py | 9 + .../pydantic/hdmf_common/v1_5_0/namespace.py | 9 + .../hdmf_common/v1_5_1/hdmf_common_base.py | 9 + .../hdmf_common/v1_5_1/hdmf_common_sparse.py | 9 + .../hdmf_common/v1_5_1/hdmf_common_table.py | 9 + .../pydantic/hdmf_common/v1_5_1/namespace.py | 9 + .../hdmf_common/v1_6_0/hdmf_common_base.py | 9 + .../hdmf_common/v1_6_0/hdmf_common_sparse.py | 9 + .../hdmf_common/v1_6_0/hdmf_common_table.py | 9 + .../pydantic/hdmf_common/v1_6_0/namespace.py | 9 + .../hdmf_common/v1_7_0/hdmf_common_base.py | 9 + .../hdmf_common/v1_7_0/hdmf_common_sparse.py | 9 + .../hdmf_common/v1_7_0/hdmf_common_table.py | 9 + .../pydantic/hdmf_common/v1_7_0/namespace.py | 9 + .../hdmf_common/v1_8_0/hdmf_common_base.py | 9 + .../hdmf_common/v1_8_0/hdmf_common_sparse.py | 9 + .../hdmf_common/v1_8_0/hdmf_common_table.py | 44 ++++- .../pydantic/hdmf_common/v1_8_0/namespace.py | 9 + .../v0_1_0/hdmf_experimental_experimental.py | 9 + .../v0_1_0/hdmf_experimental_resources.py | 9 + .../hdmf_experimental/v0_1_0/namespace.py | 9 + .../v0_2_0/hdmf_experimental_experimental.py | 9 + .../v0_2_0/hdmf_experimental_resources.py | 9 + .../hdmf_experimental/v0_2_0/namespace.py | 9 + .../v0_3_0/hdmf_experimental_experimental.py | 9 + .../v0_3_0/hdmf_experimental_resources.py | 9 + .../hdmf_experimental/v0_3_0/namespace.py | 9 + .../v0_4_0/hdmf_experimental_experimental.py | 9 + .../v0_4_0/hdmf_experimental_resources.py | 9 + .../hdmf_experimental/v0_4_0/namespace.py | 9 + .../v0_5_0/hdmf_experimental_experimental.py | 9 + .../v0_5_0/hdmf_experimental_resources.py | 9 + .../hdmf_experimental/v0_5_0/namespace.py | 9 + 191 files changed, 2318 insertions(+), 22 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py index 99b5906..6cb19f8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, 
"data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py index addd1ff..095dec1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_device.py index 1d43d1b..0456ec3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_device.py @@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py index d1a96d1..c946ad9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py @@ -44,6 +44,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py index 8d0fb5d..33abf7e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py @@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] 
+ elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py index f0e65b0..3bd6ec5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py @@ -41,6 +41,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py index 938bf63..f09b466 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py index 77e40f9..28ff7b4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py index 6c16a46..b1e0fe6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + 
elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py index 4a46e2c..2bbed5f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py index 433c454..5321376 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py @@ -45,6 +45,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py index f63232e..3ee80c2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py @@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py index 0456ea1..e8892b9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py @@ -155,6 +155,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif 
hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py index c8c1162..45c2131 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py index 45af85e..04c20b4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_device.py index 83d2f3c..bc309fc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_device.py @@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py index 92ea135..0a13c81 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py @@ -44,6 +44,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return 
self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py index 02fc4d9..3e84ef3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py @@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py index a17f8e1..199bbda 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py @@ -41,6 +41,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py index 46321df..9c49daa 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py index 97e4aa8..3322ff3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + 
return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py index 60a591f..c547aef 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py index e94dead..07d8693 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py index 552d38b..587b5ee 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py @@ -45,6 +45,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py index 7969ed1..eef6b41 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py @@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is 
not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py index c5d3cba..b5d693b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py @@ -155,6 +155,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py index 8dbf06a..4b73640 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py index 9984cf5..aa5631c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_device.py index 8f59409..dd0ab6e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_device.py @@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not 
None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py index d757e1a..402ccda 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py @@ -44,6 +44,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py index 083c41f..35bfa64 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py @@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py index ef18f50..c05c351 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py @@ -41,6 +41,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py index 2b126cd..5cc88ed 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and 
self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py index 2bafac7..88c0781 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py index d9914b5..1f41641 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py index cfd0933..e7823a4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py index f4f9f20..88bb254 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py @@ -45,6 +45,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value 
is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py index 976d7f2..e623a0b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py index 7b96622..e9737e4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py @@ -158,6 +158,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py index 4e8eeca..1d0f436 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py index 9609061..e095079 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and 
self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_device.py index fc0ff49..ed9623b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_device.py @@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py index 7bfffac..e5b05d6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py @@ -44,6 +44,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py index 6138ade..8012a74 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py @@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py index 4e54cb8..24f0613 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py @@ -42,6 +42,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, 
"value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py index d24c902..4272b2e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py index 6d493d6..40370ff 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py index 6d705b1..dc6d0a6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py index 9616063..33f8506 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, 
"value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py index 9c96489..d7adad4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py @@ -50,6 +50,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py index f7b47de..af820b1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py index 2017869..d4744f0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py @@ -165,6 +165,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py index 99ff2d4..8c121de 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, 
"value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py index da7c068..6b298ff 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_device.py index afc24d2..2b32c15 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_device.py @@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py index 4bd8b19..5869306 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py @@ -44,6 +44,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py index e66cf1a..4a4f076 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py @@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have 
it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py index 1aab54d..21932b7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py @@ -42,6 +42,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py index 94cf0cf..2dd0607 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py index 98d7ef4..483dfd5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py index 4fe4673..b9ef472 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have 
it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py index c419baf..ead9c1a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py index 9b0d75e..1b006ba 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py @@ -50,6 +50,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py index 3b83056..916660c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py index 56b42a0..ce33adb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py @@ -165,6 +165,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have 
it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py index 8ce2390..4fd6a4a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py @@ -29,6 +29,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py index 9c8a529..898519c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_device.py index 0640dac..ec6a770 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_device.py @@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py index 29faa7b..72ca241 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py @@ -44,6 +44,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or 
"data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py index b6d6397..ff49854 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py @@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py index 987b15e..dcd3d20 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py @@ -42,6 +42,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py index a439eb5..bac67ab 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py index 8425dcc..81ce140 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py @@ -29,6 +29,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value 
or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py index 14df410..6e382f4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py index 56f099a..e77547e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py index d6c991a..637d869 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py @@ -50,6 +50,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py index 26e6c7d..85857c3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value 
from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py index fe306d5..6bb4f8d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py @@ -168,6 +168,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py index aa29c83..2b85c2f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py @@ -4,9 +4,9 @@ from decimal import Decimal from enum import Enum import re import sys -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union -from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator import numpy as np +from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Iterable, Tuple +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container from numpydantic import NDArray, Shape from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable @@ -29,6 +29,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -48,6 +57,136 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" + + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + value: Optional[NDArray] = None + + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.value[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: 
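+        # Writes mirror reads: when a VectorIndex is attached (i.e. this is a
+        # ragged column), the index owns the row -> span mapping, so assignment
+        # is delegated to it; otherwise we write straight through to the
+        # underlying ``value`` array (illustratively, for some plain,
+        # non-indexed column ``vd``, ``vd[0] = 10`` acts like ``vd.value[0] = 10``).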
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            self._index[key] = value
+        else:
+            self.value[key] = value
+
+    def __getattr__(self, item: str) -> Any:
+        """
+        Forward getattr to ``value``
+        """
+        try:
+            return BaseModel.__getattr__(self, item)
+        except AttributeError as e:
+            try:
+                return getattr(self.value, item)
+            except AttributeError:
+                raise e from None
+
+    def __len__(self) -> int:
+        """
+        Use index as length, if present
+        """
+        if self._index:
+            return len(self._index)
+        else:
+            return len(self.value)
+
+
+class TimeSeriesReferenceVectorDataMixin(VectorDataMixin):
+    """
+    Mixin class for TimeSeriesReferenceVectorData -
+    very simple, just indexing the given timeseries object.
+
+    These shouldn't have additional fields in them, just the three columns
+    for index, span, and timeseries
+    """
+
+    idx_start: NDArray[Any, int]
+    count: NDArray[Any, int]
+    timeseries: NDArray[Any, BaseModel]
+
+    @model_validator(mode="after")
+    def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin":
+        assert len(self.idx_start) == len(self.timeseries) == len(self.count), (
+            f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)},"
+            f" timeseries: {len(self.timeseries)}"
+        )
+        return self
+
+    def __len__(self) -> int:
+        """Since we have ensured equal length, just return idx_start"""
+        return len(self.idx_start)
+
+    @overload
+    def _slice_helper(self, item: int) -> slice: ...
+
+    @overload
+    def _slice_helper(self, item: slice) -> List[slice]: ...
+
+    def _slice_helper(self, item: Union[int, slice]) -> Union[slice, List[slice]]:
+        if isinstance(item, (int, np.integer)):
+            return slice(self.idx_start[item], self.idx_start[item] + self.count[item])
+        else:
+            starts = self.idx_start[item]
+            ends = starts + self.count[item]
+            return [slice(start, end) for start, end in zip(starts, ends)]
+
+    def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+        if self._index is not None:
+            raise NotImplementedError(
+                "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+                " never done in the core schema."
+            )
+
+        if isinstance(item, (int, np.integer)):
+            return self.timeseries[self._slice_helper(item)]
+        elif isinstance(item, slice):
+            return [self.timeseries[subitem] for subitem in self._slice_helper(item)]
+        elif isinstance(item, Iterable):
+            return [self.timeseries[self._slice_helper(subitem)] for subitem in item]
+        else:
+            raise ValueError(
+                f"Don't know how to index with {item}, must be an int, slice, or iterable"
+            )
+
+    def __setitem__(self, key: Union[int, slice, Iterable], value: Any) -> None:
+        if self._index is not None:
+            raise NotImplementedError(
+                "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+                " never done in the core schema."
+            )
+        if isinstance(key, (int, np.integer)):
+            self.timeseries[self._slice_helper(key)] = value
+        elif isinstance(key, slice):
+            for subitem in self._slice_helper(key):
+                self.timeseries[subitem] = value
+        elif isinstance(key, Iterable):
+            for subitem in key:
+                self.timeseries[self._slice_helper(subitem)] = value
+        else:
+            raise ValueError(
+                f"Don't know how to index with {key}, must be an int, slice, or iterable"
+            )
+
+
 linkml_meta = LinkMLMeta(
     {
         "annotations": {
             "is_namespace": {"tag": "is_namespace", "value": False},
             "namespace": {"tag": "namespace", "value": "core"},
         }
     }
 )
@@ -78,7 +217,7 @@ class NWBData(Data):
     name: str = Field(...)
 
 
-class TimeSeriesReferenceVectorData(VectorData):
+class TimeSeriesReferenceVectorData(TimeSeriesReferenceVectorDataMixin, VectorData):
     """
     Column storing references to a TimeSeries (rows).
For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. """ diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py index 7251865..98282c5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_device.py index f59deb8..f54c25e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_device.py @@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py index 30e8b3e..de74f33 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py @@ -44,6 +44,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py index 1161d72..c4415c8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py @@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + 
elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py index b74e8b8..8ddb92c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py @@ -50,6 +50,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py index 11318b3..cd08504 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py @@ -50,6 +50,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py index 631f809..c792b06 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py @@ -29,6 +29,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py index 0828ebf..7c6183f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + 
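# NOTE: "value" is checked before "data", so a model defining both fields indexes "value"; if neither is set, a KeyError is raised below. +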
elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py index da627b1..d9184d4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py index 2871455..96a8013 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py @@ -50,6 +50,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py index 825df96..1b06207 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py index bc7052a..ecb9186 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py @@ -181,6 +181,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif 
hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py index 8ab4e6f..7973568 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py @@ -8,13 +8,28 @@ import numpy as np from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container from numpydantic import NDArray, Shape from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + overload, + Iterable, + Tuple, + Annotated, + Type, + TypeVar, +) from pydantic import ( BaseModel, ConfigDict, Field, RootModel, field_validator, + model_validator, ValidationInfo, BeforeValidator, ) @@ -37,6 +52,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -57,6 +81,135 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + value: Optional[NDArray] = None + + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.value[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.value[key] = value + + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use index as length, if present + """ + if self._index: + return len(self._index) + else: + return len(self.value) + + +class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): + """ + Mixin class for TimeSeriesReferenceVectorData - + very simple, just indexing the given timeseries object. 
+
+    These shouldn't have additional fields in them, just the three columns
+    for index, span, and timeseries
+    """
+
+    idx_start: NDArray[Any, int]
+    count: NDArray[Any, int]
+    timeseries: NDArray[Any, BaseModel]
+
+    @model_validator(mode="after")
+    def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin":
+        assert len(self.idx_start) == len(self.timeseries) == len(self.count), (
+            f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)},"
+            f" timeseries: {len(self.timeseries)}"
+        )
+        return self
+
+    def __len__(self) -> int:
+        """Since we have ensured equal length, just return idx_start"""
+        return len(self.idx_start)
+
+    @overload
+    def _slice_helper(self, item: int) -> slice: ...
+
+    @overload
+    def _slice_helper(self, item: slice) -> List[slice]: ...
+
+    def _slice_helper(self, item: Union[int, slice]) -> Union[slice, List[slice]]:
+        if isinstance(item, (int, np.integer)):
+            return slice(self.idx_start[item], self.idx_start[item] + self.count[item])
+        else:
+            starts = self.idx_start[item]
+            ends = starts + self.count[item]
+            return [slice(start, end) for start, end in zip(starts, ends)]
+
+    def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+        if self._index is not None:
+            raise NotImplementedError(
+                "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+                " never done in the core schema."
+            )
+
+        if isinstance(item, (int, np.integer)):
+            return self.timeseries[self._slice_helper(item)]
+        elif isinstance(item, slice):
+            return [self.timeseries[subitem] for subitem in self._slice_helper(item)]
+        elif isinstance(item, Iterable):
+            return [self.timeseries[self._slice_helper(subitem)] for subitem in item]
+        else:
+            raise ValueError(
+                f"Don't know how to index with {item}, must be an int, slice, or iterable"
+            )
+
+    def __setitem__(self, key: Union[int, slice, Iterable], value: Any) -> None:
+        if self._index is not None:
+            raise NotImplementedError(
+                "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+                " never done in the core schema."
+            )
+        if isinstance(key, (int, np.integer)):
+            self.timeseries[self._slice_helper(key)] = value
+        elif isinstance(key, slice):
+            for subitem in self._slice_helper(key):
+                self.timeseries[subitem] = value
+        elif isinstance(key, Iterable):
+            for subitem in key:
+                self.timeseries[self._slice_helper(subitem)] = value
+        else:
+            raise ValueError(
+                f"Don't know how to index with {key}, must be an int, slice, or iterable"
+            )
+
+
+
 ModelType = TypeVar("ModelType", bound=Type[BaseModel])
@@ -102,7 +255,7 @@ class NWBData(Data):
     name: str = Field(...)
 
 
-class TimeSeriesReferenceVectorData(VectorData):
+class TimeSeriesReferenceVectorData(TimeSeriesReferenceVectorDataMixin, VectorData):
     """
     Column storing references to a TimeSeries (rows).
    For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py index c295739..86b4bc0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_device.py index 53128f3..4b214a9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_device.py @@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py index f5e12e0..7c60b61 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py @@ -44,6 +44,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py index 26b4717..6091bbf 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py @@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): 
root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py index dc3dd77..e09f4fb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py @@ -51,6 +51,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py index e802bcf..a7175f4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py @@ -50,6 +50,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py index d92729b..8dbbb44 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py @@ -35,6 +35,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py index 1459133..e463240 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): 
root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py index 0cfb470..f397977 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py index fbc5c57..c80c328 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py @@ -50,6 +50,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py index 7a3b066..90bd8c1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py index ce0dfe0..ec00173 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py @@ -182,6 +182,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): 
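The `__getitem__` injected into every generated `ConfiguredBaseModel` above proxies indexing to whichever of the model's `value` or `data` fields is populated. A minimal standalone sketch of that delegation, using a hypothetical `ExampleData` model rather than any generated class:

```python
from typing import Any, List, Optional, Union

from pydantic import BaseModel, Field


class IndexableModel(BaseModel):
    """Same delegation logic as the injected ConfiguredBaseModel.__getitem__."""

    def __getitem__(self, val: Union[int, slice]) -> Any:
        if hasattr(self, "value") and self.value is not None:
            return self.value[val]
        elif hasattr(self, "data") and self.data is not None:
            return self.data[val]
        else:
            raise KeyError("No value or data field to index from")


class ExampleData(IndexableModel):
    """Hypothetical model; generated models define `data`/`value` per schema."""

    data: Optional[List[float]] = Field(None)


# slicing the model slices its `data` field directly
assert ExampleData(data=[0.1, 0.2, 0.3])[0:2] == [0.1, 0.2]
```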
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py
index 3151433..6cf4107 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py
@@ -8,13 +8,28 @@ import numpy as np
 from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container
 from numpydantic import NDArray, Shape
 from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable
-from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar
+from typing import (
+    Any,
+    ClassVar,
+    List,
+    Literal,
+    Dict,
+    Optional,
+    Union,
+    overload,
+    Iterable,
+    Tuple,
+    Annotated,
+    Type,
+    TypeVar,
+)
 from pydantic import (
     BaseModel,
     ConfigDict,
     Field,
     RootModel,
     field_validator,
+    model_validator,
     ValidationInfo,
     BeforeValidator,
 )
@@ -37,6 +52,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
@@ -57,6 +81,135 @@ class LinkMLMeta(RootModel):
 
 NUMPYDANTIC_VERSION = "1.2.1"
 
+
+class VectorDataMixin(BaseModel):
+    """
+    Mixin class to give VectorData indexing abilities
+    """
+
+    _index: Optional["VectorIndex"] = None
+
+    # redefined in `VectorData`, but included here for testing and type checking
+    value: Optional[NDArray] = None
+
+    def __init__(self, value: Optional[NDArray] = None, **kwargs):
+        if value is not None and "value" not in kwargs:
+            kwargs["value"] = value
+        super().__init__(**kwargs)
+
+    def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            return self._index[item]
+        else:
+            return self.value[item]
+
+    def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
+        if self._index:
+            # Following hdmf, VectorIndex is the thing that knows how to do the slicing
+            self._index[key] = value
+        else:
+            self.value[key] = value
+
+    def __getattr__(self, item: str) -> Any:
+        """
+        Forward getattr to ``value``
+        """
+        try:
+            return BaseModel.__getattr__(self, item)
+        except AttributeError as e:
+            try:
+                return getattr(self.value, item)
+            except AttributeError:
+                raise e from None
+
+    def __len__(self) -> int:
+        """
+        Use index as length, if present
+        """
+        if self._index:
+            return len(self._index)
+        else:
+            return len(self.value)
+
+
+class TimeSeriesReferenceVectorDataMixin(VectorDataMixin):
+    """
+    Mixin class for TimeSeriesReferenceVectorData -
+    very simple, just indexing the given timeseries object.
+
+    These shouldn't have additional fields in them, just the three columns
+    for index, span, and timeseries
+    """
+
+    idx_start: NDArray[Any, int]
+    count: NDArray[Any, int]
+    timeseries: NDArray[Any, BaseModel]
+
+    @model_validator(mode="after")
+    def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin":
+        assert len(self.idx_start) == len(self.timeseries) == len(self.count), (
+            f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)},"
+            f" timeseries: {len(self.timeseries)}"
+        )
+        return self
+
+    def __len__(self) -> int:
+        """Since we have ensured equal length, just return idx_start"""
+        return len(self.idx_start)
+
+    @overload
+    def _slice_helper(self, item: int) -> slice: ...
+
+    @overload
+    def _slice_helper(self, item: slice) -> List[slice]: ...
+
+    def _slice_helper(self, item: Union[int, slice]) -> Union[slice, List[slice]]:
+        if isinstance(item, (int, np.integer)):
+            return slice(self.idx_start[item], self.idx_start[item] + self.count[item])
+        else:
+            starts = self.idx_start[item]
+            ends = starts + self.count[item]
+            return [slice(start, end) for start, end in zip(starts, ends)]
+
+    def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+        if self._index is not None:
+            raise NotImplementedError(
+                "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+                " never done in the core schema."
+            )
+
+        if isinstance(item, (int, np.integer)):
+            return self.timeseries[self._slice_helper(item)]
+        elif isinstance(item, slice):
+            return [self.timeseries[subitem] for subitem in self._slice_helper(item)]
+        elif isinstance(item, Iterable):
+            return [self.timeseries[self._slice_helper(subitem)] for subitem in item]
+        else:
+            raise ValueError(
+                f"Don't know how to index with {item}, must be an int, slice, or iterable"
+            )
+
+    def __setitem__(self, key: Union[int, slice, Iterable], value: Any) -> None:
+        if self._index is not None:
+            raise NotImplementedError(
+                "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is"
+                " never done in the core schema."
+            )
+        if isinstance(key, (int, np.integer)):
+            self.timeseries[self._slice_helper(key)] = value
+        elif isinstance(key, slice):
+            for subitem in self._slice_helper(key):
+                self.timeseries[subitem] = value
+        elif isinstance(key, Iterable):
+            for subitem in key:
+                self.timeseries[self._slice_helper(subitem)] = value
+        else:
+            raise ValueError(
+                f"Don't know how to index with {key}, must be an int, slice, or iterable"
+            )
+
+
 ModelType = TypeVar("ModelType", bound=Type[BaseModel])
 
 
@@ -102,7 +255,7 @@ class NWBData(Data):
     name: str = Field(...)
 
 
-class TimeSeriesReferenceVectorData(VectorData):
+class TimeSeriesReferenceVectorData(TimeSeriesReferenceVectorDataMixin, VectorData):
     """
     Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column
     stores the start_index and count to indicate the range in time to be selected as well as an
     object reference to the TimeSeries.
     """
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py
index 08d5d2b..508ddf8 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py
@@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py
index deeda97..544b533 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py
index 40d3c1a..2c241ec 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py
@@ -44,6 +44,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py
index 92e28fa..6095a6f 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py
@@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py
index be8be33..19bdf9c 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py
@@ -51,6 +51,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py
index e6cb759..731a452 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py
@@ -50,6 +50,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py
index 07a02dc..ed4d986 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py
@@ -35,6 +35,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py
index 93d732d..18065b1 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py
@@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py
index 66baef6..0371f5d 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py
@@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py
index cfc2f60..dc54f7c 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py
@@ -50,6 +50,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py
index 75922c5..d454105 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py
index 6cb7862..c0c5da0 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py
@@ -184,6 +184,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
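For orientation, the `_index` delegation in `VectorDataMixin` follows hdmf's ragged-array convention: a `VectorIndex` holds the cumulative end offset of each row in a flat `VectorData` store. A simplified, self-contained sketch of that lookup in plain numpy (not the generated classes, and with made-up values):

```python
import numpy as np

# Flat storage for three ragged rows: [1, 2], [3], [4, 5, 6]
values = np.array([1, 2, 3, 4, 5, 6])
# hdmf-style index: cumulative end offset of each row within `values`
row_ends = np.array([2, 3, 6])


def get_row(i: int) -> np.ndarray:
    """Row i spans from the previous row's end offset to its own."""
    start = 0 if i == 0 else row_ends[i - 1]
    return values[start : row_ends[i]]


assert get_row(0).tolist() == [1, 2]
assert get_row(2).tolist() == [4, 5, 6]
```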
or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py index f84cee5..430e080 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py @@ -1,20 +1,29 @@ from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys import numpy as np from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container from numpydantic import NDArray, Shape from ...hdmf_common.v1_8_0.hdmf_common_table import VectorData, DynamicTable -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Annotated, Type, TypeVar +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + overload, + Iterable, + Tuple, + Annotated, + Type, + TypeVar, +) from pydantic import ( BaseModel, ConfigDict, Field, RootModel, - field_validator, + model_validator, ValidationInfo, BeforeValidator, ) @@ -37,6 +46,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -57,6 +75,135 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" + +class VectorDataMixin(BaseModel): + """ + Mixin class to give VectorData indexing abilities + """ + + _index: Optional["VectorIndex"] = None + + # redefined in `VectorData`, but included here for testing and type checking + value: Optional[NDArray] = None + + def __init__(self, value: Optional[NDArray] = None, **kwargs): + if value is not None and "value" not in kwargs: + kwargs["value"] = value + super().__init__(**kwargs) + + def __getitem__(self, item: Union[str, int, slice, Tuple[Union[str, int, slice], ...]]) -> Any: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + return self._index[item] + else: + return self.value[item] + + def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: + if self._index: + # Following hdmf, VectorIndex is the thing that knows how to do the slicing + self._index[key] = value + else: + self.value[key] = value + + def __getattr__(self, item: str) -> Any: + """ + Forward getattr to ``value`` + """ + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self.value, item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use index as length, if present + """ + if self._index: + return len(self._index) + else: + return len(self.value) + + +class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): + """ + Mixin class for TimeSeriesReferenceVectorData - + very simple, just indexing the given timeseries object. 
+ + These shouldn't have additional fields in them, just the three columns + for index, span, and timeseries + """ + + idx_start: NDArray[Any, int] + count: NDArray[Any, int] + timeseries: NDArray[Any, BaseModel] + + @model_validator(mode="after") + def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin": + assert len(self.idx_start) == len(self.timeseries) == len(self.count), ( + f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)}," + f" timeseries: {len(self.timeseries)}" + ) + return self + + def __len__(self) -> int: + """Since we have ensured equal length, just return idx_start""" + return len(self.idx_start) + + @overload + def _slice_helper(self, item: int) -> slice: ... + + @overload + def _slice_helper(self, item: slice) -> List[slice]: ... + + def _slice_helper(self, item: Union[int, slice]) -> Union[slice, List[slice]]: + if isinstance(item, (int, np.integer)): + return slice(self.idx_start[item], self.idx_start[item] + self.count[item]) + else: + starts = self.idx_start[item] + ends = starts + self.count[item] + return [slice(start, end) for start, end in zip(starts, ends)] + + def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + if self._index is not None: + raise NotImplementedError( + "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is" + " never done in the core schema." + ) + + if isinstance(item, (int, np.integer)): + return self.timeseries[self._slice_helper(item)] + elif isinstance(item, slice): + return [self.timeseries[subitem] for subitem in self._slice_helper(item)] + elif isinstance(item, Iterable): + return [self.timeseries[self._slice_helper(subitem)] for subitem in item] + else: + raise ValueError( + f"Dont know how to index with {item}, must be an int, slice, or iterable" + ) + + def __setitem__(self, key: Union[int, slice, Iterable], value: Any) -> None: + if self._index is not None: + raise NotImplementedError( + "VectorIndexing with TimeSeriesReferenceVectorData is not supported because it is" + " never done in the core schema." + ) + if isinstance(key, (int, np.integer)): + self.timeseries[self._slice_helper(key)] = value + elif isinstance(key, slice): + for subitem in self._slice_helper(key): + self.timeseries[subitem] = value + elif isinstance(key, Iterable): + for subitem in key: + self.timeseries[self._slice_helper(subitem)] = value + else: + raise ValueError( + f"Dont know how to index with {key}, must be an int, slice, or iterable" + ) + + ModelType = TypeVar("ModelType", bound=Type[BaseModel]) @@ -102,7 +249,7 @@ class NWBData(Data): name: str = Field(...) -class TimeSeriesReferenceVectorData(VectorData): +class TimeSeriesReferenceVectorData(TimeSeriesReferenceVectorDataMixin, VectorData): """ Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. 
""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py index e8c8a80..780e83a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_device.py index 0abc50a..24e2e67 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_device.py @@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py index aed71ed..17de2b5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py @@ -44,6 +44,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py index 90c2524..d10191e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py @@ -37,6 +37,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): 
root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py index ef1a1e6..950faa5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py @@ -51,6 +51,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py index 93a555a..86754a8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py @@ -50,6 +50,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py index 69a8617..c2b5aff 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py @@ -35,6 +35,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py index edf6336..2739d41 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py @@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): 
root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py index d77dcba..881aea0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py @@ -34,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py index 6e4b60f..04dcee9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py @@ -50,6 +50,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py index 0942bac..1d8b514 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py index 4f98b35..c6c0f39 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py @@ -185,6 +185,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): 
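The `_slice_helper` repeated in each `core_nwb_base` module above converts a row's `(idx_start, count)` pair into a `slice` over the referenced timeseries. The arithmetic, as a toy example with made-up numbers:

```python
import numpy as np

idx_start = np.array([0, 10, 25])  # first sample each row points at
count = np.array([10, 15, 5])      # how many samples each row spans

# Row 1 selects samples [10, 25) of its referenced timeseries,
# i.e. slice(idx_start[1], idx_start[1] + count[1])
sel = slice(idx_start[1], idx_start[1] + count[1])
assert (sel.start, sel.stop) == (10, 25)
```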
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py
index af77c6c..8cd8423 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py
index a37ac18..9d5b38f 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py
@@ -38,6 +38,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/namespace.py
index 703fefe..d0a1b95 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/namespace.py
@@ -42,6 +42,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py
index 8c05020..f0d3be3 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py
index 2a0c39a..825e522 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py
@@ -38,6 +38,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/namespace.py
index a9507d4..13ff59e 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/namespace.py
@@ -42,6 +42,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py
index c473269..7e9fa34 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py
index 57472fa..e837810 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py
@@ -38,6 +38,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/namespace.py
index e8dac61..284e138 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/namespace.py
@@ -42,6 +42,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py
index 1d657d9..0880d00 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_base.py
@@ -26,6 +26,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py
index 6cf3e21..0029140 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_sparse.py
@@ -27,6 +27,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py
index cbefb40..7c232b0 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py
@@ -39,6 +39,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py
index 62d22cb..7314aa8 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/namespace.py
@@ -41,6 +41,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py
index e75baf7..60eea59 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_base.py
@@ -26,6 +26,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py
index 62bc6ef..ebdac35 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_sparse.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py
index c62bd06..9c4e909 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py
@@ -39,6 +39,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py
index 55f5dc6..7691d2a 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/namespace.py
@@ -41,6 +41,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py
index 83003ce..1752575 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_base.py
@@ -26,6 +26,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py
index 57a1938..9331ccc 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_resources.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py
index 3d4d4af..e01d80e 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_sparse.py
@@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py
index c0e6522..f77e052 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py
@@ -39,6 +39,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py
index a2dcc70..ef79cd9 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/namespace.py
@@ -43,6 +43,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py
index 3adb8b8..9d878a5 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_base.py
@@ -26,6 +26,15 @@ class ConfiguredBaseModel(BaseModel):
     )
     object_id: Optional[str] = Field(None, description="Unique UUID for each object")
 
+    def __getitem__(self, val: Union[int, slice]) -> Any:
+        """Try and get a value from value or "data" if we have it"""
+        if hasattr(self, "value") and self.value is not None:
+            return self.value[val]
+        elif hasattr(self, "data") and self.data is not None:
+            return self.data[val]
+        else:
+            raise KeyError("No value or data field to index from")
+
 
 class LinkMLMeta(RootModel):
     root: Dict[str, Any] = {}
diff --git
a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py index f304f3a..00a62c7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_sparse.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index d3599bb..6514e14 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -39,6 +39,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py index db59f28..43432b8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/namespace.py @@ -35,6 +35,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py index 29c6ea2..5545403 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py @@ -26,6 +26,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: 
+ raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py index e261b0a..f07bed2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 946d90f..b64b835 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -39,6 +39,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/namespace.py index b9cf9ac..6d03d3d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/namespace.py @@ -36,6 +36,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py index e360db2..737fa66 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_base.py @@ -26,6 +26,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value 
is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py index 30f0c4b..41d0d3a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_sparse.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 95d594e..3593099 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -39,6 +39,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py index 836122e..1676f7c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/namespace.py @@ -36,6 +36,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py index 49293d1..21354d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_base.py @@ -26,6 +26,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: 
Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py index ce6c0ed..bc4a505 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_sparse.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index ea5d7b1..b5d6b93 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -39,6 +39,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py index 1dc832f..68060f7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/namespace.py @@ -36,6 +36,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py index 51a093f..ec81b87 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_base.py @@ -26,6 +26,15 @@ class 
ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py index f59d4a7..3bfd4a8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_sparse.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index 3c4c993..51820ad 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -39,6 +39,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py index 7d70e39..56b9f0d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/namespace.py @@ -36,6 +36,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py index f125a10..0e61a5c 100644 --- 
a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py @@ -26,6 +26,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py index 248869e..aef8124 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index 60bbadf..6104fd5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -1,12 +1,7 @@ from __future__ import annotations -from datetime import datetime, date -from decimal import Decimal -from enum import Enum -import re -import sys -from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container +from ...hdmf_common.v1_8_0.hdmf_common_base import Data import pandas as pd -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload +from typing import Any, ClassVar, List, Dict, Optional, Union, Iterable, Tuple, overload from pydantic import ( BaseModel, ConfigDict, @@ -39,6 +34,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} @@ -561,6 +565,32 @@ class AlignedDynamicTableMixin(DynamicTableMixin): df.set_index((self.name, "id"), drop=True, inplace=True) return df + @model_validator(mode="before") + @classmethod + def create_categories(cls, model: Dict[str, Any]) -> Dict: + """ + Construct categories from arguments. 
+ + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "categories" not in model: + categories = [ + k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index") + ] + model["categories"] = categories + else: + # add any columns not explicitly given an order at the end + categories = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["categories"] + ] + model["categories"].extend(categories) + return model + linkml_meta = LinkMLMeta( { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/namespace.py index 8b9bf5b..66dcf89 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/namespace.py @@ -36,6 +36,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py index 913bcfc..9a95aea 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py index 6b1a964..fdeb151 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git 
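The `create_categories` validator above mirrors the column-ordering logic on `DynamicTableMixin`: model kwargs arrive as an insertion-ordered dict, so category order falls out of passage order once metadata fields and `*_index` keys are dropped. A self-contained sketch of just the inference branch (`ToyAligned` is illustrative, not a generated class):

```python
from typing import Any, ClassVar, Dict, List, Optional, Tuple

from pydantic import BaseModel, model_validator


class ToyAligned(BaseModel):
    """Minimal stand-in carrying only what the validator needs."""

    NON_CATEGORY_FIELDS: ClassVar[Tuple[str, ...]] = ("name", "categories", "colnames", "description")

    name: str = "root"
    categories: List[str] = []
    electrodes: Optional[Any] = None
    responses: Optional[Any] = None

    @model_validator(mode="before")
    @classmethod
    def create_categories(cls, model: Dict[str, Any]) -> Dict:
        # inference branch only: derive category order from kwarg passage order
        if "categories" not in model:
            model["categories"] = [
                k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index")
            ]
        return model


tab = ToyAligned(electrodes=[1, 2], responses=[3, 4])
assert tab.categories == ["electrodes", "responses"]
```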
a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py index 7ea10f7..a9c5e62 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py @@ -44,6 +44,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py index 63f582a..01b7693 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_experimental.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py index 810f8ef..81b0840 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/hdmf_experimental_resources.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py index 1345536..5c8e028 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_2_0/namespace.py @@ -45,6 +45,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if 
hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py index 92d347a..effed9e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_experimental.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py index 4404c7f..52e554a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/hdmf_experimental_resources.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py index 8361004..bf78d15 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_3_0/namespace.py @@ -45,6 +45,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py index 0402b68..49f56f2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py +++ 
b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_experimental.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py index 7eef961..acfc7df 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/hdmf_experimental_resources.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py index c642308..2422a59 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_4_0/namespace.py @@ -46,6 +46,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py index 57e1a37..263e7b2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, 
Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py index 0adfcc3..9cbbb79 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py @@ -28,6 +28,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/namespace.py index 8f32985..e35c690 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/namespace.py @@ -46,6 +46,15 @@ class ConfiguredBaseModel(BaseModel): ) object_id: Optional[str] = Field(None, description="Unique UUID for each object") + def __getitem__(self, val: Union[int, slice]) -> Any: + """Try and get a value from value or "data" if we have it""" + if hasattr(self, "value") and self.value is not None: + return self.value[val] + elif hasattr(self, "data") and self.data is not None: + return self.data[val] + else: + raise KeyError("No value or data field to index from") + class LinkMLMeta(RootModel): root: Dict[str, Any] = {} From 0a931957267e4eb264c6f40e6f3443e08d0b2157 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Mon, 12 Aug 2024 22:59:15 -0700 Subject: [PATCH 41/61] lint --- nwb_linkml/src/nwb_linkml/adapters/adapter.py | 3 ++- nwb_linkml/src/nwb_linkml/config.py | 2 +- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 3 +++ nwb_linkml/tests/test_includes/conftest.py | 12 ++++++------ nwb_linkml/tests/test_includes/test_hdmf.py | 1 + 5 files changed, 13 insertions(+), 8 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/adapters/adapter.py b/nwb_linkml/src/nwb_linkml/adapters/adapter.py index 64f6a47..13e86fd 100644 --- a/nwb_linkml/src/nwb_linkml/adapters/adapter.py +++ b/nwb_linkml/src/nwb_linkml/adapters/adapter.py @@ -5,8 +5,8 @@ Base class for adapters import sys from abc import abstractmethod from dataclasses import dataclass, field -from typing import Any, Generator, List, Literal, Optional, Tuple, Type, TypeVar, Union, overload from logging import Logger +from typing import Any, Generator, List, Literal, Optional, Tuple, Type, TypeVar, Union, overload from linkml_runtime.dumpers import yaml_dumper from linkml_runtime.linkml_model import ( @@ -104,6 +104,7 @@ class Adapter(BaseModel): @property def logger(self) -> Logger: + """A logger with the name of the adapter class! 
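This lint commit is mostly import sorting plus docstrings; the one added to `ensure_equal_length` just below states the `TimeSeriesReferenceVectorData` invariant: the three indexing columns must zip together. A hedged, self-contained sketch of the same check on a toy model (field types simplified to lists; the real columns are arrays and object references):

```python
from typing import List

from pydantic import BaseModel, ValidationError, model_validator


class ToyTSRVD(BaseModel):
    """Illustrative stand-in carrying only the three indexing columns."""

    idx_start: List[int]
    count: List[int]
    timeseries: List[int]

    @model_validator(mode="after")
    def ensure_equal_length(self) -> "ToyTSRVD":
        # mirrors the validator in includes/hdmf.py
        assert len(self.idx_start) == len(self.timeseries) == len(self.count), (
            f"Columns have differing lengths: idx: {len(self.idx_start)},"
            f" count: {len(self.count)}, timeseries: {len(self.timeseries)}"
        )
        return self


ToyTSRVD(idx_start=[0, 5], count=[5, 5], timeseries=[0, 1])  # fine

try:
    ToyTSRVD(idx_start=[0], count=[5, 5], timeseries=[0, 1])
except ValidationError:
    print("unequal column lengths are rejected")
```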
See :class:`.config`""" if self._logger is None: self._logger = init_logger(self.__class__.__name__) return self._logger diff --git a/nwb_linkml/src/nwb_linkml/config.py b/nwb_linkml/src/nwb_linkml/config.py index bbfcaed..6bea3d5 100644 --- a/nwb_linkml/src/nwb_linkml/config.py +++ b/nwb_linkml/src/nwb_linkml/config.py @@ -2,9 +2,9 @@ Manage the operation of nwb_linkml from environmental variables """ -from typing import Optional, Literal import tempfile from pathlib import Path +from typing import Literal, Optional from pydantic import ( BaseModel, diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index b4a06d5..2ddf2cc 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -577,6 +577,9 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): @model_validator(mode="after") def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin": + """ + Each of the three indexing columns must be the same length to work! + """ assert len(self.idx_start) == len(self.timeseries) == len(self.count), ( f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)}," f" timeseries: {len(self.timeseries)}" diff --git a/nwb_linkml/tests/test_includes/conftest.py b/nwb_linkml/tests/test_includes/conftest.py index 38f1b7c..67011ec 100644 --- a/nwb_linkml/tests/test_includes/conftest.py +++ b/nwb_linkml/tests/test_includes/conftest.py @@ -4,22 +4,22 @@ import numpy as np import pytest from nwb_linkml.models import ( - ElectricalSeries, - ExtracellularEphysElectrodes, Device, - ElectrodeGroup, DynamicTableRegion, - Units, + ElectricalSeries, + ElectrodeGroup, + ExtracellularEphysElectrodes, IntracellularElectrode, IntracellularElectrodesTable, + IntracellularRecordingsTable, IntracellularResponsesTable, IntracellularStimuliTable, - IntracellularRecordingsTable, + TimeSeriesReferenceVectorData, + Units, VoltageClampSeries, VoltageClampSeriesData, VoltageClampStimulusSeries, VoltageClampStimulusSeriesData, - TimeSeriesReferenceVectorData, ) diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index f1c0af8..f7fd862 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -9,6 +9,7 @@ from nwb_linkml.models.pydantic.core.v2_7_0.namespace import ( VectorIndex, VoltageClampStimulusSeries, ) + from .conftest import _ragged_array From 9bd36340d784263844dda48a3c2b65471b93a001 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Tue, 13 Aug 2024 19:08:36 -0700 Subject: [PATCH 42/61] eager resolution of hdmf namespace when loaded from yaml --- nwb_linkml/src/nwb_linkml/io/schema.py | 43 ++++++++++++++++++++-- nwb_linkml/src/nwb_linkml/providers/git.py | 30 +++++++++++++-- 2 files changed, 65 insertions(+), 8 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/io/schema.py b/nwb_linkml/src/nwb_linkml/io/schema.py index 954fb3a..3fd8aec 100644 --- a/nwb_linkml/src/nwb_linkml/io/schema.py +++ b/nwb_linkml/src/nwb_linkml/io/schema.py @@ -5,6 +5,7 @@ Loading/saving NWB Schema yaml files from pathlib import Path from pprint import pprint from typing import Optional +import warnings from linkml_runtime.loaders import yaml_loader @@ -82,6 +83,8 @@ def load_namespace_adapter( version (str): Optional: tag or commit to check out namespace is a :class:`.NamespaceRepo`. If ``None``, use ``HEAD`` if not already checked out, or otherwise use whatever version is already checked out. 
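This commit ("eager resolution of hdmf namespace when loaded from yaml") makes `load_namespace_adapter` resolve the one import that NWB core declares by name only; `_resolve_hdmf`, added below, does the locating. A hedged usage sketch (the checkout layout and adapter attribute names are assumptions, not taken from the patch):

```python
from pathlib import Path

from nwb_linkml.io.schema import load_namespace_adapter

# assumed layout: hdmf-common-schema vendored as a sibling of core/
core = load_namespace_adapter(Path("nwb-schema/core/nwb.namespace.yaml"))

# hdmf-common should now arrive as a fully loaded adapter in `imported`,
# pinned to the version the checkout vendors, rather than as a bare name
for adapter in core.imported:
    for ns in adapter.namespaces.namespaces:
        print(ns.name, ns.version)  # attribute names assumed from context
```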
+ imported (list[:class:`.NamespacesAdapter`]): Optional: override discovered imports + with already-loaded namespaces adapters Returns: :class:`.NamespacesAdapter` @@ -111,10 +114,17 @@ def load_namespace_adapter( for ns in namespaces.namespaces: for schema in ns.schema_: if schema.source is None: - # this is normal, we'll resolve later - continue - yml_file = (path / schema.source).resolve() - sch.append(load_schema_file(yml_file)) + if imported is None and schema.namespace == "hdmf-common": + # special case - hdmf-common is imported by name without location or version, + # so to get the correct version we have to handle it separately + imported = _resolve_hdmf(namespace, path) + if imported is not None: + imported = [imported] + else: + continue + else: + yml_file = (path / schema.source).resolve() + sch.append(load_schema_file(yml_file)) if imported is not None: adapter = NamespacesAdapter(namespaces=namespaces, schemas=sch, imported=imported) @@ -124,6 +134,31 @@ def load_namespace_adapter( return adapter +def _resolve_hdmf( + namespace: Path | NamespaceRepo | Namespaces, path: Optional[Path] = None +) -> Optional[NamespacesAdapter]: + if path is None and isinstance(namespace, Namespaces): + # cant get any more information from already-loaded namespaces without a path + return None + + if isinstance(namespace, NamespaceRepo): + # easiest route is if we got a NamespaceRepo + if namespace.name == "core": + hdmf_path = (path / namespace.imports["hdmf-common"]).resolve() + return load_namespace_adapter(namespace=hdmf_path) + # otherwise the hdmf-common adapter itself, and it loads common + else: + return None + elif path is not None: + # otherwise try and get it from relative paths + # pretty much a hack, but hey we are compensating for absence of versioning system here + maybe_repo_root = path / NWB_CORE_REPO.imports["hdmf-common"] + if maybe_repo_root.exists(): + return load_namespace_adapter(namespace=maybe_repo_root) + warnings.warn(f"Could not locate hdmf-common from namespace {namespace} and path {path}") + return None + + def load_nwb_core( core_version: str = "2.7.0", hdmf_version: str = "1.8.0", hdmf_only: bool = False ) -> NamespacesAdapter: diff --git a/nwb_linkml/src/nwb_linkml/providers/git.py b/nwb_linkml/src/nwb_linkml/providers/git.py index 05ba68b..8219aaf 100644 --- a/nwb_linkml/src/nwb_linkml/providers/git.py +++ b/nwb_linkml/src/nwb_linkml/providers/git.py @@ -36,6 +36,14 @@ class NamespaceRepo(BaseModel): ), default_factory=list, ) + imports: Optional[dict[str, Path]] = Field( + None, + description=( + "Any named imports that are included eg. as submodules within their repository. 
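`NamespaceRepo.imports`, declared here and filled in for `NWB_CORE_REPO` below, records where a named import lives relative to the directory containing the importing namespace file. The resolution arithmetic `_resolve_hdmf` performs on it is plain pathlib; a self-contained sketch with an illustrative layout:

```python
from pathlib import Path

# mirrors the core declaration; the mapping value is relative to core/
imports = {"hdmf-common": Path("../hdmf-common-schema") / "common" / "namespace.yaml"}

# directory containing core's nwb.namespace.yaml in some checkout (illustrative)
core_dir = Path("/tmp/nwb-schema/core")

hdmf_path = (core_dir / imports["hdmf-common"]).resolve()
print(hdmf_path)  # /tmp/nwb-schema/hdmf-common-schema/common/namespace.yaml
```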
Dict" + " mapping schema name (used in the namespace field) to the namespace file relative to" + " the directory containing the **namespace.yaml file** (not the repo root)" + ), + ) def provide_from_git(self, commit: str | None = None) -> Path: """Provide a namespace file from a git repo""" @@ -61,6 +69,7 @@ NWB_CORE_REPO = NamespaceRepo( "2.6.0", "2.7.0", ], + imports={"hdmf-common": Path("../hdmf-common-schema") / "common" / "namespace.yaml"}, ) HDMF_COMMON_REPO = NamespaceRepo( @@ -86,7 +95,7 @@ HDMF_COMMON_REPO = NamespaceRepo( DEFAULT_REPOS = { repo.name: repo for repo in [NWB_CORE_REPO, HDMF_COMMON_REPO] -} # type: Dict[str, NamespaceRepo] +} # type: dict[str, NamespaceRepo] class GitError(OSError): @@ -112,7 +121,7 @@ class GitRepo: self.namespace = namespace self._commit = commit - def _git_call(self, *args: List[str]) -> subprocess.CompletedProcess: + def _git_call(self, *args: str) -> subprocess.CompletedProcess: res = subprocess.run(["git", "-C", self.temp_directory, *args], capture_output=True) if res.returncode != 0: raise GitError( @@ -138,8 +147,11 @@ class GitRepo: """ URL for "origin" remote """ - res = self._git_call("remote", "get-url", "origin") - return res.stdout.decode("utf-8").strip() + try: + res = self._git_call("remote", "get-url", "origin") + return res.stdout.decode("utf-8").strip() + except GitError: + return "" @property def active_commit(self) -> str: @@ -157,6 +169,16 @@ class GitRepo: """ return self.temp_directory / self.namespace.path + @property + def import_namespaces(self) -> dict[str, Path]: + """ + Absolute location of each of the imported namespaces specified in + :attr:`.NamespaceRepo.imports` + """ + if self.namespace.imports is None: + return {} + return {k: (self.namespace_file / v).resolve() for k, v in self.namespace.imports.items()} + @property def commit(self) -> Optional[str]: """ From 1715043c012c418ff339a97eb5d24c809faa9517 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Tue, 13 Aug 2024 19:09:43 -0700 Subject: [PATCH 43/61] lint --- nwb_linkml/src/nwb_linkml/io/schema.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/io/schema.py b/nwb_linkml/src/nwb_linkml/io/schema.py index 3fd8aec..a815b66 100644 --- a/nwb_linkml/src/nwb_linkml/io/schema.py +++ b/nwb_linkml/src/nwb_linkml/io/schema.py @@ -2,10 +2,10 @@ Loading/saving NWB Schema yaml files """ +import warnings from pathlib import Path from pprint import pprint from typing import Optional -import warnings from linkml_runtime.loaders import yaml_loader @@ -155,7 +155,9 @@ def _resolve_hdmf( maybe_repo_root = path / NWB_CORE_REPO.imports["hdmf-common"] if maybe_repo_root.exists(): return load_namespace_adapter(namespace=maybe_repo_root) - warnings.warn(f"Could not locate hdmf-common from namespace {namespace} and path {path}") + warnings.warn( + f"Could not locate hdmf-common from namespace {namespace} and path {path}", stacklevel=1 + ) return None From 50005d33e595e9b457f48bd340cdfe0feba8ff15 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Tue, 13 Aug 2024 19:11:46 -0700 Subject: [PATCH 44/61] fuckin codespell. 
also don't double run tests --- .github/workflows/lint.yml | 2 ++ .github/workflows/tests.yml | 2 ++ nwb_linkml/src/nwb_linkml/io/schema.py | 2 +- 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index fdad9a8..7f8fc5e 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -2,6 +2,8 @@ name: Lint on: push: + branches: + - main pull_request: branches: [main] diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index c14e806..ce2fffc 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -2,6 +2,8 @@ name: Tests on: push: + branches: + - main pull_request: branches: - main diff --git a/nwb_linkml/src/nwb_linkml/io/schema.py b/nwb_linkml/src/nwb_linkml/io/schema.py index a815b66..029fc70 100644 --- a/nwb_linkml/src/nwb_linkml/io/schema.py +++ b/nwb_linkml/src/nwb_linkml/io/schema.py @@ -138,7 +138,7 @@ def _resolve_hdmf( namespace: Path | NamespaceRepo | Namespaces, path: Optional[Path] = None ) -> Optional[NamespacesAdapter]: if path is None and isinstance(namespace, Namespaces): - # cant get any more information from already-loaded namespaces without a path + # can't get any more information from already-loaded namespaces without a path return None if isinstance(namespace, NamespaceRepo): From 01e46f753160a8217059f818b61aff21040e3f17 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Tue, 13 Aug 2024 21:25:56 -0700 Subject: [PATCH 45/61] Make VectorData and VectorIndex generics to ensure coercion to VectorData for declared columns --- nwb_linkml/pyproject.toml | 6 +- .../src/nwb_linkml/generators/pydantic.py | 23 ++++ nwb_linkml/src/nwb_linkml/includes/hdmf.py | 59 +++++++--- nwb_linkml/src/nwb_linkml/io/schema.py | 2 +- nwb_linkml/tests/test_includes/test_hdmf.py | 107 +++++++++++++++++- 5 files changed, 176 insertions(+), 21 deletions(-) diff --git a/nwb_linkml/pyproject.toml b/nwb_linkml/pyproject.toml index 8119a7f..ffe0f54 100644 --- a/nwb_linkml/pyproject.toml +++ b/nwb_linkml/pyproject.toml @@ -36,14 +36,10 @@ plot = [ "dash-cytoscape<1.0.0,>=0.3.0", ] tests = [ - "nwb-linkml[plot]", + "nwb-linkml", "pytest<8.0.0,>=7.4.0", "pytest-depends<2.0.0,>=1.0.1", - "coverage<7.0.0,>=6.1.1", - "pytest-md<1.0.0,>=0.2.0", "pytest-cov<5.0.0,>=4.1.0", - "coveralls<4.0.0,>=3.3.1", - "pytest-profiling<2.0.0,>=1.7.0", "sybil<6.0.0,>=5.0.3", "requests-cache>=1.2.1", ] diff --git a/nwb_linkml/src/nwb_linkml/generators/pydantic.py b/nwb_linkml/src/nwb_linkml/generators/pydantic.py index 35ae598..0cdfd23 100644 --- a/nwb_linkml/src/nwb_linkml/generators/pydantic.py +++ b/nwb_linkml/src/nwb_linkml/generators/pydantic.py @@ -116,6 +116,7 @@ class NWBPydanticGenerator(PydanticGenerator): def after_generate_class(self, cls: ClassResult, sv: SchemaView) -> ClassResult: """Customize dynamictable behavior""" cls = AfterGenerateClass.inject_dynamictable(cls) + cls = AfterGenerateClass.wrap_dynamictable_columns(cls, sv) return cls def before_render_template(self, template: PydanticModule, sv: SchemaView) -> PydanticModule: @@ -278,6 +279,28 @@ class AfterGenerateClass: return cls + @staticmethod + def wrap_dynamictable_columns(cls: ClassResult, sv: SchemaView) -> ClassResult: + """ + Wrap NDArray columns inside of dynamictables with ``VectorData`` or + ``VectorIndex``, which are generic classes whose value slot is + parameterized by the NDArray + """ + if cls.source.is_a == "DynamicTable" or "DynamicTable" in sv.class_ancestors( + cls.source.name + ): + for an_attr in 
cls.cls.attributes: + if "NDArray" in (slot_range := cls.cls.attributes[an_attr].range): + if an_attr.endswith("_index"): + cls.cls.attributes[an_attr].range = "".join( + ["VectorIndex[", slot_range, "]"] + ) + else: + cls.cls.attributes[an_attr].range = "".join( + ["VectorData[", slot_range, "]"] + ) + return cls + def compile_python( text_or_fn: str, package_path: Path = None, module_name: str = "test" diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 2ddf2cc..e4b9ff1 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -2,15 +2,18 @@ Special types for mimicking HDMF special case behavior """ +import sys from typing import ( TYPE_CHECKING, Any, ClassVar, Dict, + Generic, Iterable, List, Optional, Tuple, + TypeVar, Union, overload, ) @@ -33,6 +36,9 @@ from pydantic import ( if TYPE_CHECKING: from nwb_linkml.models import VectorData, VectorIndex +T = TypeVar("T", bound=NDArray) +T_INJECT = 'T = TypeVar("T", bound=NDArray)' + class DynamicTableMixin(BaseModel): """ @@ -219,20 +225,28 @@ class DynamicTableMixin(BaseModel): anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ if "colnames" not in model: - colnames = [ - k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end colnames = [ k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) ] - model["colnames"].extend(colnames) + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = model["colnames"].copy() + colnames.extend( + [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) + ] + ) + model["colnames"] = colnames return model @model_validator(mode="after") @@ -322,7 +336,7 @@ class DynamicTableMixin(BaseModel): ) -class VectorDataMixin(BaseModel): +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -330,7 +344,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -373,13 +387,13 @@ class VectorDataMixin(BaseModel): return len(self.value) -class VectorIndexMixin(BaseModel): +class VectorIndexMixin(BaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None target: Optional["VectorData"] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): @@ -649,8 +663,10 @@ DYNAMIC_TABLE_IMPORTS = Imports( module="typing", objects=[ ObjectImport(name="ClassVar"), + ObjectImport(name="Generic"), ObjectImport(name="Iterable"), ObjectImport(name="Tuple"), + ObjectImport(name="TypeVar"), ObjectImport(name="overload"), ], ), @@ -677,6 +693,7 @@ VectorData is purposefully excluded as an import or an inject so that it will be resolved to the VectorData definition in the generated module """ 
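# A minimal sketch of the parameterized-generic pattern the mixins above rely
# on (names here are illustrative, not the injected classes): subscripting a
# pydantic Generic model builds a concrete model whose fields validate against
# the type argument, which is what lets a declared dynamictable column coerce
# its value to VectorData[<some NDArray>].
from typing import Generic, List, Optional, TypeVar

from pydantic import BaseModel

S = TypeVar("S")


class SketchVector(BaseModel, Generic[S]):
    """Illustrative stand-in for a VectorData-like generic container."""

    value: Optional[S] = None


SketchVector[List[int]](value=[1, 2, 3])  # ok: validates against List[int]
# SketchVector[List[int]](value="oops") would raise a pydantic ValidationError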
DYNAMIC_TABLE_INJECTS = [ + T_INJECT, VectorDataMixin, VectorIndexMixin, DynamicTableRegionMixin, @@ -689,13 +706,27 @@ TSRVD_IMPORTS = Imports( Import( module="typing", objects=[ - ObjectImport(name="overload"), + ObjectImport(name="Generic"), ObjectImport(name="Iterable"), ObjectImport(name="Tuple"), + ObjectImport(name="TypeVar"), + ObjectImport(name="overload"), ], ), Import(module="pydantic", objects=[ObjectImport(name="model_validator")]), ] ) """Imports for TimeSeriesReferenceVectorData""" -TSRVD_INJECTS = [VectorDataMixin, TimeSeriesReferenceVectorDataMixin] +TSRVD_INJECTS = [T_INJECT, VectorDataMixin, TimeSeriesReferenceVectorDataMixin] + +if "pytest" in sys.modules: + # during testing define concrete subclasses... + class VectorData(VectorDataMixin): + """VectorData subclass for testing""" + + pass + + class VectorIndex(VectorIndexMixin): + """VectorIndex subclass for testing""" + + pass diff --git a/nwb_linkml/src/nwb_linkml/io/schema.py b/nwb_linkml/src/nwb_linkml/io/schema.py index 029fc70..42718f5 100644 --- a/nwb_linkml/src/nwb_linkml/io/schema.py +++ b/nwb_linkml/src/nwb_linkml/io/schema.py @@ -114,7 +114,7 @@ def load_namespace_adapter( for ns in namespaces.namespaces: for schema in ns.schema_: if schema.source is None: - if imported is None and schema.namespace == "hdmf-common": + if imported is None and schema.namespace == "hdmf-common" and ns.name == "core": # special case - hdmf-common is imported by name without location or version, # so to get the correct version we have to handle it separately imported = _resolve_hdmf(namespace, path) diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index f7fd862..bde829b 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -1,5 +1,9 @@ import numpy as np import pandas as pd +from numpydantic import NDArray, Shape + +from nwb_linkml.includes import hdmf +from nwb_linkml.includes.hdmf import DynamicTableMixin, VectorDataMixin, VectorIndexMixin # FIXME: Make this just be the output of the provider by patching into import machinery from nwb_linkml.models.pydantic.core.v2_7_0.namespace import ( @@ -48,7 +52,7 @@ def test_dynamictable_indexing(electrical_series): # get a single column col = electrodes["y"] - assert all(col == [5, 6, 7, 8, 9]) + assert all(col.value == [5, 6, 7, 8, 9]) # get a single cell val = electrodes[0, "y"] @@ -198,3 +202,104 @@ def test_aligned_dynamictable(intracellular_recordings_table): for i in range(len(stims)): assert isinstance(stims[i], VoltageClampStimulusSeries) assert all([i == val for val in stims[i][:]]) + + +# -------------------------------------------------- +# Direct mixin tests +# -------------------------------------------------- + + +def test_dynamictable_mixin_indexing(): + """ + This is just a placeholder test to say that indexing is tested above + with actual model objects in case i ever ctrl+f for this + """ + pass + + +def test_dynamictable_mixin_colnames(): + """ + Should correctly infer colnames + """ + + class MyDT(DynamicTableMixin): + existing_col: NDArray[Shape["* col"], int] + + new_col_1 = VectorDataMixin(value=np.arange(10)) + new_col_2 = VectorDataMixin(value=np.arange(10)) + + inst = MyDT(existing_col=np.arange(10), new_col_1=new_col_1, new_col_2=new_col_2) + assert inst.colnames == ["existing_col", "new_col_1", "new_col_2"] + + +def test_dynamictable_mixin_colnames_index(): + """ + Exclude index columns in colnames + """ + + class MyDT(DynamicTableMixin): + 
existing_col: NDArray[Shape["* col"], int]
+
+    cols = {
+        "existing_col": np.arange(10),
+        "new_col_1": hdmf.VectorData(value=np.arange(10)),
+        "new_col_2": hdmf.VectorData(value=np.arange(10)),
+    }
+    # explicit index with mismatching name
+    cols["weirdname_index"] = VectorIndexMixin(value=np.arange(10), target=cols["new_col_1"])
+    # implicit index with matching name
+    cols["new_col_2_index"] = VectorIndexMixin(value=np.arange(10))
+
+    inst = MyDT(**cols)
+    assert inst.colnames == ["existing_col", "new_col_1", "new_col_2"]
+
+
+def test_dynamictable_mixin_colnames_ordered():
+    """
+    Should be able to pass explicit order to colnames
+    """
+
+    class MyDT(DynamicTableMixin):
+        existing_col: NDArray[Shape["* col"], int]
+
+    cols = {
+        "existing_col": np.arange(10),
+        "new_col_1": hdmf.VectorData(value=np.arange(10)),
+        "new_col_2": hdmf.VectorData(value=np.arange(10)),
+        "new_col_3": hdmf.VectorData(value=np.arange(10)),
+    }
+    order = ["new_col_2", "existing_col", "new_col_1", "new_col_3"]
+
+    inst = MyDT(**cols, colnames=order)
+    assert inst.colnames == order
+
+    # this should get reflected in the columns selector and the produced df
+    assert all([key1 == key2 for key1, key2 in zip(order, inst._columns)])
+    assert all(inst[0].columns == order)
+
+    # partial lists should append unnamed columns at the end
+    partial_order = ["new_col_3", "new_col_2"]
+    inst = MyDT(**cols, colnames=partial_order)
+    assert inst.colnames == [*partial_order, "existing_col", "new_col_1"]
+
+
+def test_dynamictable_mixin_getattr():
+    """
+    Dynamictable should forward unknown getattr requests to the df
+    """
+
+    class MyDT(DynamicTableMixin):
+        existing_col: NDArray[Shape["* col"], int]
+
+    class AModel(DynamicTableMixin):
+        col: hdmf.VectorData[NDArray[Shape["3, 3"], int]]
+
+    col = hdmf.VectorData(value=np.arange(10))
+    inst = MyDT(existing_col=col)
+    # regular lookup for attrs that exist
+    # inst.existing_col
+    # assert inst.existing_col == col
+    # df lookup otherwise
+    # inst.columns

From 980db25b155f6b87096b909a1882ca72244c6c71 Mon Sep 17 00:00:00 2001
From: sneakers-the-rat
Date: Tue, 13 Aug 2024 21:26:50 -0700
Subject: [PATCH 46/61] regenerate models

---
 .../pydantic/core/v2_2_0/core_nwb_epoch.py | 8 +-
 .../pydantic/core/v2_2_0/core_nwb_file.py | 24 +++---
 .../pydantic/core/v2_2_0/core_nwb_icephys.py | 4 +-
 .../pydantic/core/v2_2_0/core_nwb_misc.py | 58 ++++++++------
 .../pydantic/core/v2_2_1/core_nwb_epoch.py | 8 +-
 .../pydantic/core/v2_2_1/core_nwb_file.py | 24 +++---
 .../pydantic/core/v2_2_1/core_nwb_icephys.py | 4 +-
 .../pydantic/core/v2_2_1/core_nwb_misc.py | 58 ++++++++------
 .../pydantic/core/v2_2_2/core_nwb_epoch.py | 8 +-
 .../pydantic/core/v2_2_2/core_nwb_file.py | 24 +++---
 .../pydantic/core/v2_2_2/core_nwb_icephys.py | 4 +-
 .../pydantic/core/v2_2_2/core_nwb_misc.py | 58 ++++++++------
 .../pydantic/core/v2_2_4/core_nwb_epoch.py | 8 +-
 .../pydantic/core/v2_2_4/core_nwb_file.py | 24 +++---
 .../pydantic/core/v2_2_4/core_nwb_icephys.py | 4 +-
 .../pydantic/core/v2_2_4/core_nwb_misc.py | 58 ++++++++------
 .../pydantic/core/v2_2_4/core_nwb_ophys.py | 2 +-
 .../pydantic/core/v2_2_5/core_nwb_epoch.py | 8 +-
 .../pydantic/core/v2_2_5/core_nwb_file.py | 24 +++---
 .../pydantic/core/v2_2_5/core_nwb_icephys.py | 4 +-
 .../pydantic/core/v2_2_5/core_nwb_misc.py | 58 ++++++++------
 .../pydantic/core/v2_2_5/core_nwb_ophys.py | 2 +-
 .../pydantic/core/v2_3_0/core_nwb_epoch.py | 8 +-
 .../pydantic/core/v2_3_0/core_nwb_file.py | 24 +++---
 .../pydantic/core/v2_3_0/core_nwb_icephys.py | 4 +-
.../pydantic/core/v2_3_0/core_nwb_misc.py | 76 ++++++++++-------- .../pydantic/core/v2_3_0/core_nwb_ophys.py | 2 +- .../pydantic/core/v2_4_0/core_nwb_base.py | 24 +++++- .../pydantic/core/v2_4_0/core_nwb_epoch.py | 8 +- .../pydantic/core/v2_4_0/core_nwb_file.py | 24 +++--- .../pydantic/core/v2_4_0/core_nwb_icephys.py | 22 +++--- .../pydantic/core/v2_4_0/core_nwb_misc.py | 76 ++++++++++-------- .../pydantic/core/v2_4_0/core_nwb_ophys.py | 2 +- .../pydantic/core/v2_5_0/core_nwb_base.py | 14 +++- .../pydantic/core/v2_5_0/core_nwb_epoch.py | 8 +- .../pydantic/core/v2_5_0/core_nwb_file.py | 24 +++--- .../pydantic/core/v2_5_0/core_nwb_icephys.py | 22 +++--- .../pydantic/core/v2_5_0/core_nwb_misc.py | 76 ++++++++++-------- .../pydantic/core/v2_5_0/core_nwb_ophys.py | 2 +- .../core/v2_6_0_alpha/core_nwb_base.py | 14 +++- .../core/v2_6_0_alpha/core_nwb_epoch.py | 8 +- .../core/v2_6_0_alpha/core_nwb_file.py | 24 +++--- .../core/v2_6_0_alpha/core_nwb_icephys.py | 22 +++--- .../core/v2_6_0_alpha/core_nwb_misc.py | 76 ++++++++++-------- .../core/v2_6_0_alpha/core_nwb_ophys.py | 2 +- .../pydantic/core/v2_7_0/core_nwb_base.py | 20 ++++- .../pydantic/core/v2_7_0/core_nwb_epoch.py | 8 +- .../pydantic/core/v2_7_0/core_nwb_file.py | 24 +++--- .../pydantic/core/v2_7_0/core_nwb_icephys.py | 22 +++--- .../pydantic/core/v2_7_0/core_nwb_misc.py | 76 ++++++++++-------- .../pydantic/core/v2_7_0/core_nwb_ophys.py | 2 +- .../hdmf_common/v1_1_0/hdmf_common_table.py | 77 ++++++++++++++---- .../hdmf_common/v1_1_2/hdmf_common_table.py | 77 ++++++++++++++---- .../hdmf_common/v1_1_3/hdmf_common_table.py | 77 ++++++++++++++---- .../hdmf_common/v1_2_0/hdmf_common_table.py | 77 ++++++++++++++---- .../hdmf_common/v1_2_1/hdmf_common_table.py | 77 ++++++++++++++---- .../hdmf_common/v1_3_0/hdmf_common_table.py | 77 ++++++++++++++---- .../hdmf_common/v1_4_0/hdmf_common_table.py | 77 ++++++++++++++---- .../hdmf_common/v1_5_0/hdmf_common_table.py | 79 +++++++++++++++---- .../hdmf_common/v1_5_1/hdmf_common_table.py | 79 +++++++++++++++---- .../hdmf_common/v1_6_0/hdmf_common_table.py | 79 +++++++++++++++---- .../hdmf_common/v1_7_0/hdmf_common_table.py | 79 +++++++++++++++---- .../hdmf_common/v1_8_0/hdmf_common_table.py | 60 ++++++++++---- 63 files changed, 1391 insertions(+), 712 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py index 33abf7e..ada000f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) - start_time: NDArray[Any, float] = Field( + start_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -114,7 +114,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, float] = Field( + stop_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -123,7 +123,7 @@ class TimeIntervals(DynamicTable): } }, ) - tags: Optional[NDArray[Any, str]] = Field( + tags: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""User-defined tags that identify or categorize events.""", json_schema_extra={ @@ -164,7 +164,7 @@ class TimeIntervals(DynamicTable): description="""The names of the columns in this table. 
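# The regenerated hunks that follow all apply one mechanical change: columns
# previously typed as bare NDArrays are rewrapped as VectorData[<same NDArray>]
# (and *_index columns as VectorIndex[...]), so raw arrays passed at
# construction are coerced into VectorData and gain its indexing behavior.
# Schematically, for the field below:
#
#     before: start_time: NDArray[Any, float] = Field(...)
#     after:  start_time: VectorData[NDArray[Any, float]] = Field(...)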
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py index 3bd6ec5..548cc3b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py @@ -394,7 +394,7 @@ class ExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: NDArray[Any, float] = Field( + x: VectorData[NDArray[Any, float]] = Field( ..., description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -403,7 +403,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - y: NDArray[Any, float] = Field( + y: VectorData[NDArray[Any, float]] = Field( ..., description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -412,7 +412,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - z: NDArray[Any, float] = Field( + z: VectorData[NDArray[Any, float]] = Field( ..., description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -421,7 +421,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: NDArray[Any, float] = Field( + imp: VectorData[NDArray[Any, float]] = Field( ..., description="""Impedance of the channel.""", json_schema_extra={ @@ -430,7 +430,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - location: NDArray[Any, str] = Field( + location: VectorData[NDArray[Any, str]] = Field( ..., description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", json_schema_extra={ @@ -439,7 +439,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: NDArray[Any, float] = Field( + filtering: VectorData[NDArray[Any, float]] = Field( ..., description="""Description of hardware filtering.""", json_schema_extra={ @@ -451,7 +451,7 @@ class ExtracellularEphysElectrodes(DynamicTable): group: List[ElectrodeGroup] = Field( ..., description="""Reference to the ElectrodeGroup this electrode is a part of.""" ) - group_name: NDArray[Any, str] = Field( + group_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the ElectrodeGroup this electrode is a part of.""", json_schema_extra={ @@ -460,7 +460,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, float]] = Field( + rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -469,7 +469,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, float]] = Field( + rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -478,7 +478,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, float]] = Field( + rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -487,7 +487,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - reference: Optional[NDArray[Any, str]] = Field( + reference: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""Description of the reference used for this electrode.""", json_schema_extra={ @@ -501,7 +501,7 @@ class ExtracellularEphysElectrodes(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py index f09b466..86cbcb4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py @@ -864,7 +864,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, int] = Field( + sweep_number: VectorData[NDArray[Any, int]] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ @@ -893,7 +893,7 @@ class SweepTable(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py index b1e0fe6..0e16f74 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py @@ -401,7 +401,7 @@ class DecompositionSeriesBands(DynamicTable): "bands", json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}}, ) - band_name: NDArray[Any, str] = Field( + band_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the band, e.g. theta.""", json_schema_extra={ @@ -410,7 +410,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( + band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field( ..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -424,12 +424,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], float] = Field( + band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], float] = Field( + band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -439,7 +439,7 @@ class DecompositionSeriesBands(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -489,19 +489,21 @@ class Units(DynamicTable): } }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( - None, - description="""Observation intervals for each unit.""", - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_intervals"}, - {"alias": "start_end", "exact_cardinality": 2}, - ] + obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = ( + Field( + None, + description="""Observation intervals for each unit.""", + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_intervals"}, + {"alias": "start_end", "exact_cardinality": 2}, + ] + } } - } - }, + }, + ) ) electrodes_index: Named[Optional[VectorIndex]] = Field( None, @@ -530,16 +532,20 @@ class Units(DynamicTable): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Electrode group that each spike unit came from.""" ) - waveform_mean: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_mean: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_sd: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") colnames: List[str] = Field( @@ -547,7 +553,7 @@ class Units(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py index 3e84ef3..92fd747 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) 
- start_time: NDArray[Any, float] = Field( + start_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -114,7 +114,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, float] = Field( + stop_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -123,7 +123,7 @@ class TimeIntervals(DynamicTable): } }, ) - tags: Optional[NDArray[Any, str]] = Field( + tags: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""User-defined tags that identify or categorize events.""", json_schema_extra={ @@ -164,7 +164,7 @@ class TimeIntervals(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py index 199bbda..06fdd52 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py @@ -394,7 +394,7 @@ class ExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: NDArray[Any, float] = Field( + x: VectorData[NDArray[Any, float]] = Field( ..., description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -403,7 +403,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - y: NDArray[Any, float] = Field( + y: VectorData[NDArray[Any, float]] = Field( ..., description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -412,7 +412,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - z: NDArray[Any, float] = Field( + z: VectorData[NDArray[Any, float]] = Field( ..., description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -421,7 +421,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: NDArray[Any, float] = Field( + imp: VectorData[NDArray[Any, float]] = Field( ..., description="""Impedance of the channel.""", json_schema_extra={ @@ -430,7 +430,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - location: NDArray[Any, str] = Field( + location: VectorData[NDArray[Any, str]] = Field( ..., description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", json_schema_extra={ @@ -439,7 +439,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: NDArray[Any, float] = Field( + filtering: VectorData[NDArray[Any, float]] = Field( ..., description="""Description of hardware filtering.""", json_schema_extra={ @@ -451,7 +451,7 @@ class ExtracellularEphysElectrodes(DynamicTable): group: List[ElectrodeGroup] = Field( ..., description="""Reference to the ElectrodeGroup this electrode is a part of.""" ) - group_name: NDArray[Any, str] = Field( + group_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the ElectrodeGroup this electrode is a part of.""", json_schema_extra={ @@ -460,7 +460,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, float]] = Field( + rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -469,7 +469,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, float]] = Field( + rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -478,7 +478,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, float]] = Field( + rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -487,7 +487,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - reference: Optional[NDArray[Any, str]] = Field( + reference: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""Description of the reference used for this electrode.""", json_schema_extra={ @@ -501,7 +501,7 @@ class ExtracellularEphysElectrodes(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py index 9c49daa..d004723 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py @@ -864,7 +864,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, int] = Field( + sweep_number: VectorData[NDArray[Any, int]] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ @@ -893,7 +893,7 @@ class SweepTable(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py index c547aef..cf92403 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py @@ -401,7 +401,7 @@ class DecompositionSeriesBands(DynamicTable): "bands", json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}}, ) - band_name: NDArray[Any, str] = Field( + band_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the band, e.g. theta.""", json_schema_extra={ @@ -410,7 +410,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( + band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field( ..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -424,12 +424,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], float] = Field( + band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], float] = Field( + band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -439,7 +439,7 @@ class DecompositionSeriesBands(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -489,19 +489,21 @@ class Units(DynamicTable): } }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( - None, - description="""Observation intervals for each unit.""", - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_intervals"}, - {"alias": "start_end", "exact_cardinality": 2}, - ] + obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = ( + Field( + None, + description="""Observation intervals for each unit.""", + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_intervals"}, + {"alias": "start_end", "exact_cardinality": 2}, + ] + } } - } - }, + }, + ) ) electrodes_index: Named[Optional[VectorIndex]] = Field( None, @@ -530,16 +532,20 @@ class Units(DynamicTable): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Electrode group that each spike unit came from.""" ) - waveform_mean: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_mean: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_sd: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") colnames: List[str] = Field( @@ -547,7 +553,7 @@ class Units(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py index 35bfa64..215cca6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) 
- start_time: NDArray[Any, float] = Field( + start_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -114,7 +114,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, float] = Field( + stop_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -123,7 +123,7 @@ class TimeIntervals(DynamicTable): } }, ) - tags: Optional[NDArray[Any, str]] = Field( + tags: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""User-defined tags that identify or categorize events.""", json_schema_extra={ @@ -164,7 +164,7 @@ class TimeIntervals(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py index c05c351..5d28191 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py @@ -394,7 +394,7 @@ class ExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: NDArray[Any, float] = Field( + x: VectorData[NDArray[Any, float]] = Field( ..., description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -403,7 +403,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - y: NDArray[Any, float] = Field( + y: VectorData[NDArray[Any, float]] = Field( ..., description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -412,7 +412,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - z: NDArray[Any, float] = Field( + z: VectorData[NDArray[Any, float]] = Field( ..., description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -421,7 +421,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: NDArray[Any, float] = Field( + imp: VectorData[NDArray[Any, float]] = Field( ..., description="""Impedance of the channel.""", json_schema_extra={ @@ -430,7 +430,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - location: NDArray[Any, str] = Field( + location: VectorData[NDArray[Any, str]] = Field( ..., description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", json_schema_extra={ @@ -439,7 +439,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: NDArray[Any, float] = Field( + filtering: VectorData[NDArray[Any, float]] = Field( ..., description="""Description of hardware filtering.""", json_schema_extra={ @@ -451,7 +451,7 @@ class ExtracellularEphysElectrodes(DynamicTable): group: List[ElectrodeGroup] = Field( ..., description="""Reference to the ElectrodeGroup this electrode is a part of.""" ) - group_name: NDArray[Any, str] = Field( + group_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the ElectrodeGroup this electrode is a part of.""", json_schema_extra={ @@ -460,7 +460,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, float]] = Field( + rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -469,7 +469,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, float]] = Field( + rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -478,7 +478,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, float]] = Field( + rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -487,7 +487,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - reference: Optional[NDArray[Any, str]] = Field( + reference: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""Description of the reference used for this electrode.""", json_schema_extra={ @@ -501,7 +501,7 @@ class ExtracellularEphysElectrodes(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py index 5cc88ed..cb14508 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py @@ -864,7 +864,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, int] = Field( + sweep_number: VectorData[NDArray[Any, int]] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ @@ -893,7 +893,7 @@ class SweepTable(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py index 1f41641..6e8daf9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py @@ -401,7 +401,7 @@ class DecompositionSeriesBands(DynamicTable): "bands", json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}}, ) - band_name: NDArray[Any, str] = Field( + band_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the band, e.g. theta.""", json_schema_extra={ @@ -410,7 +410,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( + band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field( ..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -424,12 +424,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], float] = Field( + band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], float] = Field( + band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -439,7 +439,7 @@ class DecompositionSeriesBands(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -489,19 +489,21 @@ class Units(DynamicTable): } }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( - None, - description="""Observation intervals for each unit.""", - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_intervals"}, - {"alias": "start_end", "exact_cardinality": 2}, - ] + obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = ( + Field( + None, + description="""Observation intervals for each unit.""", + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_intervals"}, + {"alias": "start_end", "exact_cardinality": 2}, + ] + } } - } - }, + }, + ) ) electrodes_index: Named[Optional[VectorIndex]] = Field( None, @@ -530,16 +532,20 @@ class Units(DynamicTable): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Electrode group that each spike unit came from.""" ) - waveform_mean: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_mean: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_sd: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") colnames: List[str] = Field( @@ -547,7 +553,7 @@ class Units(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py index 8012a74..e216685 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) 
- start_time: NDArray[Any, float] = Field( + start_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -114,7 +114,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, float] = Field( + stop_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -123,7 +123,7 @@ class TimeIntervals(DynamicTable): } }, ) - tags: Optional[NDArray[Any, str]] = Field( + tags: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""User-defined tags that identify or categorize events.""", json_schema_extra={ @@ -164,7 +164,7 @@ class TimeIntervals(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py index 24f0613..e70c2dc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py @@ -370,7 +370,7 @@ class ExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: NDArray[Any, float] = Field( + x: VectorData[NDArray[Any, float]] = Field( ..., description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -379,7 +379,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - y: NDArray[Any, float] = Field( + y: VectorData[NDArray[Any, float]] = Field( ..., description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -388,7 +388,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - z: NDArray[Any, float] = Field( + z: VectorData[NDArray[Any, float]] = Field( ..., description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -397,7 +397,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: NDArray[Any, float] = Field( + imp: VectorData[NDArray[Any, float]] = Field( ..., description="""Impedance of the channel.""", json_schema_extra={ @@ -406,7 +406,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - location: NDArray[Any, str] = Field( + location: VectorData[NDArray[Any, str]] = Field( ..., description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", json_schema_extra={ @@ -415,7 +415,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: NDArray[Any, float] = Field( + filtering: VectorData[NDArray[Any, float]] = Field( ..., description="""Description of hardware filtering.""", json_schema_extra={ @@ -427,7 +427,7 @@ class ExtracellularEphysElectrodes(DynamicTable): group: List[ElectrodeGroup] = Field( ..., description="""Reference to the ElectrodeGroup this electrode is a part of.""" ) - group_name: NDArray[Any, str] = Field( + group_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the ElectrodeGroup this electrode is a part of.""", json_schema_extra={ @@ -436,7 +436,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, float]] = Field( + rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -445,7 +445,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, float]] = Field( + rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -454,7 +454,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, float]] = Field( + rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -463,7 +463,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - reference: Optional[NDArray[Any, str]] = Field( + reference: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""Description of the reference used for this electrode.""", json_schema_extra={ @@ -477,7 +477,7 @@ class ExtracellularEphysElectrodes(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py index 4272b2e..98b68dc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py @@ -864,7 +864,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, int] = Field( + sweep_number: VectorData[NDArray[Any, int]] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ @@ -893,7 +893,7 @@ class SweepTable(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py index dc6d0a6..1a7a26a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py @@ -401,7 +401,7 @@ class DecompositionSeriesBands(DynamicTable): "bands", json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}}, ) - band_name: NDArray[Any, str] = Field( + band_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the band, e.g. theta.""", json_schema_extra={ @@ -410,7 +410,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( + band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field( ..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -424,12 +424,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], float] = Field( + band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], float] = Field( + band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -439,7 +439,7 @@ class DecompositionSeriesBands(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -489,19 +489,21 @@ class Units(DynamicTable): } }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( - None, - description="""Observation intervals for each unit.""", - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_intervals"}, - {"alias": "start_end", "exact_cardinality": 2}, - ] + obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = ( + Field( + None, + description="""Observation intervals for each unit.""", + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_intervals"}, + {"alias": "start_end", "exact_cardinality": 2}, + ] + } } - } - }, + }, + ) ) electrodes_index: Named[Optional[VectorIndex]] = Field( None, @@ -530,16 +532,20 @@ class Units(DynamicTable): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Electrode group that each spike unit came from.""" ) - waveform_mean: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_mean: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_sd: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") colnames: List[str] = Field( @@ -547,7 +553,7 @@ class Units(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py index d7adad4..54b8b6a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py @@ -369,7 +369,7 @@ class PlaneSegmentation(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py index 4a4f076..3bee6b5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) - start_time: NDArray[Any, float] = Field( + start_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -114,7 +114,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, float] = Field( + stop_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -123,7 +123,7 @@ class TimeIntervals(DynamicTable): } }, ) - tags: Optional[NDArray[Any, str]] = Field( + tags: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""User-defined tags that identify or categorize events.""", json_schema_extra={ @@ -164,7 +164,7 @@ class TimeIntervals(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py index 21932b7..5f22cff 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py @@ -370,7 +370,7 @@ class ExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: NDArray[Any, float] = Field( + x: VectorData[NDArray[Any, float]] = Field( ..., description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -379,7 +379,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - y: NDArray[Any, float] = Field( + y: VectorData[NDArray[Any, float]] = Field( ..., description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -388,7 +388,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - z: NDArray[Any, float] = Field( + z: VectorData[NDArray[Any, float]] = Field( ..., description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -397,7 +397,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: NDArray[Any, float] = Field( + imp: VectorData[NDArray[Any, float]] = Field( ..., description="""Impedance of the channel.""", json_schema_extra={ @@ 
-406,7 +406,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - location: NDArray[Any, str] = Field( + location: VectorData[NDArray[Any, str]] = Field( ..., description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", json_schema_extra={ @@ -415,7 +415,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: NDArray[Any, float] = Field( + filtering: VectorData[NDArray[Any, float]] = Field( ..., description="""Description of hardware filtering.""", json_schema_extra={ @@ -427,7 +427,7 @@ class ExtracellularEphysElectrodes(DynamicTable): group: List[ElectrodeGroup] = Field( ..., description="""Reference to the ElectrodeGroup this electrode is a part of.""" ) - group_name: NDArray[Any, str] = Field( + group_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the ElectrodeGroup this electrode is a part of.""", json_schema_extra={ @@ -436,7 +436,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, float]] = Field( + rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -445,7 +445,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, float]] = Field( + rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -454,7 +454,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, float]] = Field( + rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -463,7 +463,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - reference: Optional[NDArray[Any, str]] = Field( + reference: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""Description of the reference used for this electrode.""", json_schema_extra={ @@ -477,7 +477,7 @@ class ExtracellularEphysElectrodes(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py index 2dd0607..220fc73 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py @@ -864,7 +864,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, int] = Field( + sweep_number: VectorData[NDArray[Any, int]] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ @@ -893,7 +893,7 @@ class SweepTable(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py index b9ef472..4a2cdd6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py @@ -401,7 +401,7 @@ class DecompositionSeriesBands(DynamicTable): "bands", json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}}, ) - band_name: NDArray[Any, str] = Field( + band_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the band, e.g. theta.""", json_schema_extra={ @@ -410,7 +410,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( + band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field( ..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -424,12 +424,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], float] = Field( + band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], float] = Field( + band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -439,7 +439,7 @@ class DecompositionSeriesBands(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -489,19 +489,21 @@ class Units(DynamicTable): } }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( - None, - description="""Observation intervals for each unit.""", - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_intervals"}, - {"alias": "start_end", "exact_cardinality": 2}, - ] + obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = ( + Field( + None, + description="""Observation intervals for each unit.""", + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_intervals"}, + {"alias": "start_end", "exact_cardinality": 2}, + ] + } } - } - }, + }, + ) ) electrodes_index: Named[Optional[VectorIndex]] = Field( None, @@ -530,16 +532,20 @@ class Units(DynamicTable): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Electrode group that each spike unit came from.""" ) - waveform_mean: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_mean: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_sd: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") colnames: List[str] = Field( @@ -547,7 +553,7 @@ class Units(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py index 1b006ba..4e95539 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py @@ -371,7 +371,7 @@ class PlaneSegmentation(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py index ff49854..fd6ff0a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) - start_time: NDArray[Any, float] = Field( + start_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -114,7 +114,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, float] = Field( + stop_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -123,7 +123,7 @@ class TimeIntervals(DynamicTable): } }, ) - tags: Optional[NDArray[Any, str]] = Field( + tags: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""User-defined tags that identify or categorize events.""", json_schema_extra={ @@ -164,7 +164,7 @@ class TimeIntervals(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py index dcd3d20..4fbd77b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py @@ -370,7 +370,7 @@ class ExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: NDArray[Any, float] = Field( + x: VectorData[NDArray[Any, float]] = Field( ..., description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -379,7 +379,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - y: NDArray[Any, float] = Field( + y: VectorData[NDArray[Any, float]] = Field( ..., description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -388,7 +388,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - z: NDArray[Any, float] = Field( + z: VectorData[NDArray[Any, float]] = Field( ..., description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -397,7 +397,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: NDArray[Any, float] = Field( + imp: VectorData[NDArray[Any, float]] = Field( ..., description="""Impedance of the channel, in ohms.""", 
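Across all of these generated modules the column annotations change from bare `NDArray` types to `VectorData[...]` parametrizations, so each column still validates its inner array shape and dtype while gaining `VectorData`'s indexing behavior. A minimal sketch of what that buys, outside the generated code (the import path is illustrative; each generated module ships its own `VectorData` per hdmf-common version):

```python
# Minimal sketch, not part of the patch: a VectorData-wrapped column
# validates the inner array while still behaving like a VectorData.
# Import path is illustrative only.
import numpy as np
from nwb_linkml.models.pydantic.hdmf_common.v1_5_0.hdmf_common_table import VectorData

col = VectorData(
    name="id",
    description="Array of unique identifiers for the rows of this dynamic table.",
    value=np.arange(10),
)
# VectorDataMixin forwards indexing to the wrapped array when no
# VectorIndex is attached (per the mixin's stated indexing abilities).
assert col[3] == 3
```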
json_schema_extra={ @@ -406,7 +406,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - location: NDArray[Any, str] = Field( + location: VectorData[NDArray[Any, str]] = Field( ..., description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", json_schema_extra={ @@ -415,7 +415,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: NDArray[Any, float] = Field( + filtering: VectorData[NDArray[Any, float]] = Field( ..., description="""Description of hardware filtering, including the filter name and frequency cutoffs.""", json_schema_extra={ @@ -427,7 +427,7 @@ class ExtracellularEphysElectrodes(DynamicTable): group: List[ElectrodeGroup] = Field( ..., description="""Reference to the ElectrodeGroup this electrode is a part of.""" ) - group_name: NDArray[Any, str] = Field( + group_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the ElectrodeGroup this electrode is a part of.""", json_schema_extra={ @@ -436,7 +436,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, float]] = Field( + rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -445,7 +445,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, float]] = Field( + rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -454,7 +454,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, float]] = Field( + rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -463,7 +463,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - reference: Optional[NDArray[Any, str]] = Field( + reference: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""Description of the reference used for this electrode.""", json_schema_extra={ @@ -477,7 +477,7 @@ class ExtracellularEphysElectrodes(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py index bac67ab..95dc2af 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py @@ -866,7 +866,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, int] = Field( + sweep_number: VectorData[NDArray[Any, int]] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ @@ -895,7 +895,7 @@ class SweepTable(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py index 6e382f4..f0e9795 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py @@ -413,7 +413,7 @@ class DecompositionSeriesBands(DynamicTable): "bands", json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}}, ) - band_name: NDArray[Any, str] = Field( + band_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the band, e.g. theta.""", json_schema_extra={ @@ -422,7 +422,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( + band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field( ..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -436,12 +436,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], float] = Field( + band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], float] = Field( + band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -451,7 +451,7 @@ class DecompositionSeriesBands(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -498,19 +498,21 @@ class Units(DynamicTable): } }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( - None, - description="""Observation intervals for each unit.""", - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_intervals"}, - {"alias": "start_end", "exact_cardinality": 2}, - ] + obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = ( + Field( + None, + description="""Observation intervals for each unit.""", + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_intervals"}, + {"alias": "start_end", "exact_cardinality": 2}, + ] + } } - } - }, + }, + ) ) electrodes_index: Named[Optional[VectorIndex]] = Field( None, @@ -539,26 +541,32 @@ class Units(DynamicTable): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Electrode group that each spike unit came from.""" ) - waveform_mean: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_mean: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_sd: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field( - None, - description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. 
When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", - json_schema_extra={ - "linkml_meta": { - "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} - } - }, + waveforms: VectorData[Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = ( + Field( + None, + description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", + json_schema_extra={ + "linkml_meta": { + "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} + } + }, + ) ) waveforms_index: Named[Optional[VectorIndex]] = Field( None, @@ -589,7 +597,7 @@ class Units(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py index 637d869..ee291fb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py @@ -380,7 +380,7 @@ class PlaneSegmentation(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py index 2b85c2f..2328c2c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py @@ -5,7 +5,20 @@ from enum import Enum import re import sys import numpy as np -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, overload, Iterable, Tuple +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + Generic, + Iterable, + Tuple, + TypeVar, + overload, +) from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_validator from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container from numpydantic import NDArray, Shape @@ -58,8 +71,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -67,7 +82,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -125,6 +140,9 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): @model_validator(mode="after") def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin": + """ + Each of the three indexing columns must be the same length to work! 
+ """ assert len(self.idx_start) == len(self.timeseries) == len(self.count), ( f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)}," f" timeseries: {len(self.timeseries)}" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py index c4415c8..31033ca 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) - start_time: NDArray[Any, float] = Field( + start_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -114,7 +114,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, float] = Field( + stop_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -123,7 +123,7 @@ class TimeIntervals(DynamicTable): } }, ) - tags: Optional[NDArray[Any, str]] = Field( + tags: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""User-defined tags that identify or categorize events.""", json_schema_extra={ @@ -164,7 +164,7 @@ class TimeIntervals(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py index 8ddb92c..bc2132e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py @@ -378,7 +378,7 @@ class ExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: NDArray[Any, float] = Field( + x: VectorData[NDArray[Any, float]] = Field( ..., description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -387,7 +387,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - y: NDArray[Any, float] = Field( + y: VectorData[NDArray[Any, float]] = Field( ..., description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -396,7 +396,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - z: NDArray[Any, float] = Field( + z: VectorData[NDArray[Any, float]] = Field( ..., description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -405,7 +405,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: NDArray[Any, float] = Field( + imp: VectorData[NDArray[Any, float]] = Field( ..., description="""Impedance of the channel, in ohms.""", json_schema_extra={ @@ -414,7 +414,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - location: NDArray[Any, str] = Field( + location: VectorData[NDArray[Any, str]] = Field( ..., description="""Location of the electrode (channel). 
Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", json_schema_extra={ @@ -423,7 +423,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: NDArray[Any, str] = Field( + filtering: VectorData[NDArray[Any, str]] = Field( ..., description="""Description of hardware filtering, including the filter name and frequency cutoffs.""", json_schema_extra={ @@ -435,7 +435,7 @@ class ExtracellularEphysElectrodes(DynamicTable): group: List[ElectrodeGroup] = Field( ..., description="""Reference to the ElectrodeGroup this electrode is a part of.""" ) - group_name: NDArray[Any, str] = Field( + group_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the ElectrodeGroup this electrode is a part of.""", json_schema_extra={ @@ -444,7 +444,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, float]] = Field( + rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -453,7 +453,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, float]] = Field( + rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -462,7 +462,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, float]] = Field( + rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -471,7 +471,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - reference: Optional[NDArray[Any, str]] = Field( + reference: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""Description of the reference used for this electrode.""", json_schema_extra={ @@ -485,7 +485,7 @@ class ExtracellularEphysElectrodes(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py index cd08504..10cce5d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py @@ -873,7 +873,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, int] = Field( + sweep_number: VectorData[NDArray[Any, int]] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ @@ -902,7 +902,7 @@ class SweepTable(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -939,7 +939,7 @@ class IntracellularElectrodesTable(DynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -985,7 +985,7 @@ class IntracellularStimuliTable(DynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1031,7 +1031,7 @@ class IntracellularResponsesTable(DynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1097,7 +1097,7 @@ class IntracellularRecordingsTable(AlignedDynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1146,7 +1146,7 @@ class SimultaneousRecordingsTable(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1220,7 +1220,7 @@ class SequentialRecordingsTable(DynamicTable): } }, ) - stimulus_type: NDArray[Any, str] = Field( + stimulus_type: VectorData[NDArray[Any, str]] = Field( ..., description="""The type of stimulus used for the sequential recording.""", json_schema_extra={ @@ -1234,7 +1234,7 @@ class SequentialRecordingsTable(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1313,7 +1313,7 @@ class RepetitionsTable(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1394,7 +1394,7 @@ class ExperimentalConditionsTable(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py index 7c6183f..5a64e12 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py @@ -413,7 +413,7 @@ class DecompositionSeriesBands(DynamicTable): "bands", json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}}, ) - band_name: NDArray[Any, str] = Field( + band_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the band, e.g. theta.""", json_schema_extra={ @@ -422,7 +422,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( + band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field( ..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -436,12 +436,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], float] = Field( + band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], float] = Field( + band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -451,7 +451,7 @@ class DecompositionSeriesBands(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -498,19 +498,21 @@ class Units(DynamicTable): } }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( - None, - description="""Observation intervals for each unit.""", - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_intervals"}, - {"alias": "start_end", "exact_cardinality": 2}, - ] + obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = ( + Field( + None, + description="""Observation intervals for each unit.""", + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_intervals"}, + {"alias": "start_end", "exact_cardinality": 2}, + ] + } } - } - }, + }, + ) ) electrodes_index: Named[Optional[VectorIndex]] = Field( None, @@ -539,26 +541,32 @@ class Units(DynamicTable): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Electrode group that each spike unit came from.""" ) - waveform_mean: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_mean: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_sd: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field( - None, - description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. 
When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", - json_schema_extra={ - "linkml_meta": { - "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} - } - }, + waveforms: VectorData[Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = ( + Field( + None, + description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", + json_schema_extra={ + "linkml_meta": { + "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} + } + }, + ) ) waveforms_index: Named[Optional[VectorIndex]] = Field( None, @@ -589,7 +597,7 @@ class Units(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py index 96a8013..da2c58f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py @@ -380,7 +380,7 @@ class PlaneSegmentation(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py index 7973568..d080acf 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py @@ -16,12 +16,13 @@ from typing import ( Dict, Optional, Union, - overload, + Generic, Iterable, Tuple, + TypeVar, + overload, Annotated, Type, - TypeVar, ) from pydantic import ( BaseModel, @@ -81,8 +82,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -90,7 +93,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -148,6 +151,9 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): @model_validator(mode="after") def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin": + """ + Each of the three indexing columns must be the same length to work! + """ assert len(self.idx_start) == len(self.timeseries) == len(self.count), ( f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)}," f" timeseries: {len(self.timeseries)}" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py index 6091bbf..fd5f403 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) 
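The same three-line change to the versioned `core_nwb_base` modules recurs for every NWB version above: a `TypeVar` bound to `NDArray`, `Generic[T]` on the mixin, and a typed `value`. Condensed for reference:

```python
# The recurring mixin change, condensed; each versioned core_nwb_base
# module defines its own copy of this.
from typing import Generic, Optional, TypeVar

from numpydantic import NDArray
from pydantic import BaseModel

T = TypeVar("T", bound=NDArray)


class VectorDataMixin(BaseModel, Generic[T]):
    """Mixin class to give VectorData indexing abilities"""

    value: Optional[T] = None


# Subscripting specializes `value`, e.g.
# VectorData[NDArray[Shape["* num_rows"], int]]
# constrains the wrapped array to a 1-D integer array.
```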
- start_time: NDArray[Any, float] = Field( + start_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -114,7 +114,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, float] = Field( + stop_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -123,7 +123,7 @@ class TimeIntervals(DynamicTable): } }, ) - tags: Optional[NDArray[Any, str]] = Field( + tags: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""User-defined tags that identify or categorize events.""", json_schema_extra={ @@ -173,7 +173,7 @@ class TimeIntervals(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py index e09f4fb..ae0ce47 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py @@ -381,7 +381,7 @@ class ExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: Optional[NDArray[Any, float]] = Field( + x: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -390,7 +390,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - y: Optional[NDArray[Any, float]] = Field( + y: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -399,7 +399,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - z: Optional[NDArray[Any, float]] = Field( + z: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -408,7 +408,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: Optional[NDArray[Any, float]] = Field( + imp: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""Impedance of the channel, in ohms.""", json_schema_extra={ @@ -417,7 +417,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - location: NDArray[Any, str] = Field( + location: VectorData[NDArray[Any, str]] = Field( ..., description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", json_schema_extra={ @@ -426,7 +426,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: Optional[NDArray[Any, str]] = Field( + filtering: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""Description of hardware filtering, including the filter name and frequency cutoffs.""", json_schema_extra={ @@ -438,7 +438,7 @@ class ExtracellularEphysElectrodes(DynamicTable): group: List[ElectrodeGroup] = Field( ..., description="""Reference to the ElectrodeGroup this electrode is a part of.""" ) - group_name: NDArray[Any, str] = Field( + group_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the ElectrodeGroup this electrode is a part of.""", json_schema_extra={ @@ -447,7 +447,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, float]] = Field( + rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -456,7 +456,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, float]] = Field( + rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -465,7 +465,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, float]] = Field( + rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -474,7 +474,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - reference: Optional[NDArray[Any, str]] = Field( + reference: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""Description of the reference electrode and/or reference scheme used for this electrode, e.g., \"stainless steel skull screw\" or \"online common average referencing\".""", json_schema_extra={ @@ -488,7 +488,7 @@ class ExtracellularEphysElectrodes(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py index a7175f4..4a7ccc2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py @@ -874,7 +874,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, int] = Field( + sweep_number: VectorData[NDArray[Any, int]] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ @@ -903,7 +903,7 @@ class SweepTable(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -940,7 +940,7 @@ class IntracellularElectrodesTable(DynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -986,7 +986,7 @@ class IntracellularStimuliTable(DynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1032,7 +1032,7 @@ class IntracellularResponsesTable(DynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1098,7 +1098,7 @@ class IntracellularRecordingsTable(AlignedDynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1147,7 +1147,7 @@ class SimultaneousRecordingsTable(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1221,7 +1221,7 @@ class SequentialRecordingsTable(DynamicTable): } }, ) - stimulus_type: NDArray[Any, str] = Field( + stimulus_type: VectorData[NDArray[Any, str]] = Field( ..., description="""The type of stimulus used for the sequential recording.""", json_schema_extra={ @@ -1235,7 +1235,7 @@ class SequentialRecordingsTable(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1314,7 +1314,7 @@ class RepetitionsTable(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1395,7 +1395,7 @@ class ExperimentalConditionsTable(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py index e463240..5a7755b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py @@ -413,7 +413,7 @@ class DecompositionSeriesBands(DynamicTable): "bands", json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}}, ) - band_name: NDArray[Any, str] = Field( + band_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the band, e.g. theta.""", json_schema_extra={ @@ -422,7 +422,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( + band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field( ..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -436,12 +436,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], float] = Field( + band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], float] = Field( + band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -451,7 +451,7 @@ class DecompositionSeriesBands(DynamicTable): description="""The names of the columns in this table. 
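A note on the numpydantic `Shape` strings used throughout these annotations: `"* num_bands, 2 low_high"` reads as any number of rows along the `num_bands` axis, each with exactly two named elements. A hedged sketch with an illustrative model (not the generated `DecompositionSeriesBands` class):

```python
import numpy as np
from numpydantic import NDArray, Shape
from pydantic import BaseModel


class Bands(BaseModel):
    # "*" = any length along num_bands; "2" = exactly two values per band
    band_limits: NDArray[Shape["* num_bands, 2 low_high"], float]


Bands(band_limits=np.array([[4.0, 8.0], [8.0, 12.0]]))  # validates
# Bands(band_limits=np.array([4.0, 8.0]))  # would fail: wrong rank
```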
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -498,19 +498,21 @@ class Units(DynamicTable): } }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( - None, - description="""Observation intervals for each unit.""", - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_intervals"}, - {"alias": "start_end", "exact_cardinality": 2}, - ] + obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = ( + Field( + None, + description="""Observation intervals for each unit.""", + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_intervals"}, + {"alias": "start_end", "exact_cardinality": 2}, + ] + } } - } - }, + }, + ) ) electrodes_index: Named[Optional[VectorIndex]] = Field( None, @@ -539,26 +541,32 @@ class Units(DynamicTable): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Electrode group that each spike unit came from.""" ) - waveform_mean: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_mean: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_sd: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field( - None, - description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. 
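Ragged columns like `obs_intervals` pair the `VectorData` with a `Named[Optional[VectorIndex]]` companion (`obs_intervals_index`). Assuming HDMF's cumulative end-offset convention, the index stores where each row's slice of the flat target column ends; a small sketch of that relationship:

```python
import numpy as np

values = np.array([0.0, 0.5, 1.0, 1.5, 2.0])  # flat target column
index = np.array([2, 3, 5])  # row i's slice ends at index[i]


def row(i: int) -> np.ndarray:
    # row 0 starts at 0; every later row starts where the previous ended
    start = 0 if i == 0 else index[i - 1]
    return values[start:index[i]]


assert row(0).tolist() == [0.0, 0.5]
assert row(1).tolist() == [1.0]
```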
When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", - json_schema_extra={ - "linkml_meta": { - "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} - } - }, + waveforms: VectorData[Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = ( + Field( + None, + description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", + json_schema_extra={ + "linkml_meta": { + "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} + } + }, + ) ) waveforms_index: Named[Optional[VectorIndex]] = Field( None, @@ -589,7 +597,7 @@ class Units(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py index c80c328..268b313 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py @@ -380,7 +380,7 @@ class PlaneSegmentation(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py index 6cf4107..26677a0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py @@ -16,12 +16,13 @@ from typing import ( Dict, Optional, Union, - overload, + Generic, Iterable, Tuple, + TypeVar, + overload, Annotated, Type, - TypeVar, ) from pydantic import ( BaseModel, @@ -81,8 +82,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -90,7 +93,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -148,6 +151,9 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): @model_validator(mode="after") def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin": + """ + Each of the three indexing columns must be the same length to work! + """ assert len(self.idx_start) == len(self.timeseries) == len(self.count), ( f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)}," f" timeseries: {len(self.timeseries)}" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py index 6095a6f..29ed69e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) 
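The `ensure_equal_length` validator repeated in each `core_nwb_base` module guards the three parallel columns of `TimeSeriesReferenceVectorData`. A self-contained sketch of the same check, using a stand-in class rather than the generated one:

```python
from pydantic import BaseModel, model_validator


class RefColumns(BaseModel):  # illustrative stand-in
    idx_start: list
    count: list
    timeseries: list

    @model_validator(mode="after")
    def ensure_equal_length(self) -> "RefColumns":
        # mode="after" runs on the constructed model, so mismatched
        # column lengths fail fast at instantiation time
        assert len(self.idx_start) == len(self.timeseries) == len(self.count), (
            f"Columns have differing lengths: idx: {len(self.idx_start)},"
            f" count: {len(self.count)}, timeseries: {len(self.timeseries)}"
        )
        return self
```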
- start_time: NDArray[Any, float] = Field( + start_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -114,7 +114,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, float] = Field( + stop_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -123,7 +123,7 @@ class TimeIntervals(DynamicTable): } }, ) - tags: Optional[NDArray[Any, str]] = Field( + tags: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""User-defined tags that identify or categorize events.""", json_schema_extra={ @@ -173,7 +173,7 @@ class TimeIntervals(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py index 19bdf9c..db4b5ca 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py @@ -381,7 +381,7 @@ class ExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: Optional[NDArray[Any, float]] = Field( + x: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -390,7 +390,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - y: Optional[NDArray[Any, float]] = Field( + y: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -399,7 +399,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - z: Optional[NDArray[Any, float]] = Field( + z: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -408,7 +408,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: Optional[NDArray[Any, float]] = Field( + imp: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""Impedance of the channel, in ohms.""", json_schema_extra={ @@ -417,7 +417,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - location: NDArray[Any, str] = Field( + location: VectorData[NDArray[Any, str]] = Field( ..., description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", json_schema_extra={ @@ -426,7 +426,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: Optional[NDArray[Any, str]] = Field( + filtering: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""Description of hardware filtering, including the filter name and frequency cutoffs.""", json_schema_extra={ @@ -438,7 +438,7 @@ class ExtracellularEphysElectrodes(DynamicTable): group: List[ElectrodeGroup] = Field( ..., description="""Reference to the ElectrodeGroup this electrode is a part of.""" ) - group_name: NDArray[Any, str] = Field( + group_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the ElectrodeGroup this electrode is a part of.""", json_schema_extra={ @@ -447,7 +447,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, float]] = Field( + rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -456,7 +456,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, float]] = Field( + rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -465,7 +465,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, float]] = Field( + rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -474,7 +474,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - reference: Optional[NDArray[Any, str]] = Field( + reference: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""Description of the reference electrode and/or reference scheme used for this electrode, e.g., \"stainless steel skull screw\" or \"online common average referencing\".""", json_schema_extra={ @@ -488,7 +488,7 @@ class ExtracellularEphysElectrodes(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py index 731a452..b18f673 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py @@ -874,7 +874,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, int] = Field( + sweep_number: VectorData[NDArray[Any, int]] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ @@ -903,7 +903,7 @@ class SweepTable(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -940,7 +940,7 @@ class IntracellularElectrodesTable(DynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -986,7 +986,7 @@ class IntracellularStimuliTable(DynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1032,7 +1032,7 @@ class IntracellularResponsesTable(DynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1098,7 +1098,7 @@ class IntracellularRecordingsTable(AlignedDynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1147,7 +1147,7 @@ class SimultaneousRecordingsTable(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1221,7 +1221,7 @@ class SequentialRecordingsTable(DynamicTable): } }, ) - stimulus_type: NDArray[Any, str] = Field( + stimulus_type: VectorData[NDArray[Any, str]] = Field( ..., description="""The type of stimulus used for the sequential recording.""", json_schema_extra={ @@ -1235,7 +1235,7 @@ class SequentialRecordingsTable(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1314,7 +1314,7 @@ class RepetitionsTable(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1395,7 +1395,7 @@ class ExperimentalConditionsTable(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py index 18065b1..66dad75 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py @@ -413,7 +413,7 @@ class DecompositionSeriesBands(DynamicTable): "bands", json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}}, ) - band_name: NDArray[Any, str] = Field( + band_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the band, e.g. theta.""", json_schema_extra={ @@ -422,7 +422,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( + band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field( ..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -436,12 +436,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], float] = Field( + band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], float] = Field( + band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -451,7 +451,7 @@ class DecompositionSeriesBands(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -498,19 +498,21 @@ class Units(DynamicTable): } }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( - None, - description="""Observation intervals for each unit.""", - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_intervals"}, - {"alias": "start_end", "exact_cardinality": 2}, - ] + obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = ( + Field( + None, + description="""Observation intervals for each unit.""", + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_intervals"}, + {"alias": "start_end", "exact_cardinality": 2}, + ] + } } - } - }, + }, + ) ) electrodes_index: Named[Optional[VectorIndex]] = Field( None, @@ -539,26 +541,32 @@ class Units(DynamicTable): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Electrode group that each spike unit came from.""" ) - waveform_mean: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_mean: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_sd: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field( - None, - description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. 
When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", - json_schema_extra={ - "linkml_meta": { - "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} - } - }, + waveforms: VectorData[Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = ( + Field( + None, + description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", + json_schema_extra={ + "linkml_meta": { + "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} + } + }, + ) ) waveforms_index: Named[Optional[VectorIndex]] = Field( None, @@ -589,7 +597,7 @@ class Units(DynamicTable): description="""The names of the columns in this table. 
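The `waveforms` docstring above walks through a doubly-indexed example; here it is as executable arithmetic, with the values taken directly from that docstring:

```python
waveforms_index_index = [2, 5, 6]  # cumulative spike-event offsets per unit
waveforms_index = [3, 6, 8, 10, 12, 13]  # cumulative waveform offsets per spike event


def slice_bounds(offsets: list, i: int) -> tuple:
    # same cumulative end-offset convention as a single VectorIndex
    return (0 if i == 0 else offsets[i - 1]), offsets[i]


# unit 0 owns spike events [0, 2); its first event owns waveforms rows [0, 3)
assert slice_bounds(waveforms_index_index, 0) == (0, 2)
assert slice_bounds(waveforms_index, 0) == (0, 3)
# unit 1 owns the next 3 spike events, [2, 5)
assert slice_bounds(waveforms_index_index, 1) == (2, 5)
```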
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py index dc54f7c..7c3e17b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py @@ -479,7 +479,7 @@ class PlaneSegmentation(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py index 430e080..096733f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py @@ -1,4 +1,9 @@ from __future__ import annotations +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys import numpy as np from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container from numpydantic import NDArray, Shape @@ -11,18 +16,20 @@ from typing import ( Dict, Optional, Union, - overload, + Generic, Iterable, Tuple, + TypeVar, + overload, Annotated, Type, - TypeVar, ) from pydantic import ( BaseModel, ConfigDict, Field, RootModel, + field_validator, model_validator, ValidationInfo, BeforeValidator, @@ -75,8 +82,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -84,7 +93,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -142,6 +151,9 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): @model_validator(mode="after") def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin": + """ + Each of the three indexing columns must be the same length to work! 
+ """ assert len(self.idx_start) == len(self.timeseries) == len(self.count), ( f"Columns have differing lengths: idx: {len(self.idx_start)}, count: {len(self.count)}," f" timeseries: {len(self.timeseries)}" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py index d10191e..1b07dac 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py @@ -105,7 +105,7 @@ class TimeIntervals(DynamicTable): ) name: str = Field(...) - start_time: NDArray[Any, float] = Field( + start_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Start time of epoch, in seconds.""", json_schema_extra={ @@ -114,7 +114,7 @@ class TimeIntervals(DynamicTable): } }, ) - stop_time: NDArray[Any, float] = Field( + stop_time: VectorData[NDArray[Any, float]] = Field( ..., description="""Stop time of epoch, in seconds.""", json_schema_extra={ @@ -123,7 +123,7 @@ class TimeIntervals(DynamicTable): } }, ) - tags: Optional[NDArray[Any, str]] = Field( + tags: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""User-defined tags that identify or categorize events.""", json_schema_extra={ @@ -173,7 +173,7 @@ class TimeIntervals(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py index 950faa5..20ea663 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py @@ -389,7 +389,7 @@ class ExtracellularEphysElectrodes(DynamicTable): "linkml_meta": {"equals_string": "electrodes", "ifabsent": "string(electrodes)"} }, ) - x: Optional[NDArray[Any, float]] = Field( + x: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""x coordinate of the channel location in the brain (+x is posterior).""", json_schema_extra={ @@ -398,7 +398,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - y: Optional[NDArray[Any, float]] = Field( + y: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""y coordinate of the channel location in the brain (+y is inferior).""", json_schema_extra={ @@ -407,7 +407,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - z: Optional[NDArray[Any, float]] = Field( + z: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""z coordinate of the channel location in the brain (+z is right).""", json_schema_extra={ @@ -416,7 +416,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - imp: Optional[NDArray[Any, float]] = Field( + imp: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""Impedance of the channel, in ohms.""", json_schema_extra={ @@ -425,7 +425,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - location: NDArray[Any, str] = Field( + location: 
VectorData[NDArray[Any, str]] = Field( ..., description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", json_schema_extra={ @@ -434,7 +434,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - filtering: Optional[NDArray[Any, str]] = Field( + filtering: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""Description of hardware filtering, including the filter name and frequency cutoffs.""", json_schema_extra={ @@ -446,7 +446,7 @@ class ExtracellularEphysElectrodes(DynamicTable): group: List[ElectrodeGroup] = Field( ..., description="""Reference to the ElectrodeGroup this electrode is a part of.""" ) - group_name: NDArray[Any, str] = Field( + group_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the ElectrodeGroup this electrode is a part of.""", json_schema_extra={ @@ -455,7 +455,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_x: Optional[NDArray[Any, float]] = Field( + rel_x: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""x coordinate in electrode group""", json_schema_extra={ @@ -464,7 +464,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_y: Optional[NDArray[Any, float]] = Field( + rel_y: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""y coordinate in electrode group""", json_schema_extra={ @@ -473,7 +473,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - rel_z: Optional[NDArray[Any, float]] = Field( + rel_z: VectorData[Optional[NDArray[Any, float]]] = Field( None, description="""z coordinate in electrode group""", json_schema_extra={ @@ -482,7 +482,7 @@ class ExtracellularEphysElectrodes(DynamicTable): } }, ) - reference: Optional[NDArray[Any, str]] = Field( + reference: VectorData[Optional[NDArray[Any, str]]] = Field( None, description="""Description of the reference electrode and/or reference scheme used for this electrode, e.g., \"stainless steel skull screw\" or \"online common average referencing\".""", json_schema_extra={ @@ -496,7 +496,7 @@ class ExtracellularEphysElectrodes(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py index 86754a8..6993568 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py @@ -874,7 +874,7 @@ class SweepTable(DynamicTable): ) name: str = Field(...) - sweep_number: NDArray[Any, int] = Field( + sweep_number: VectorData[NDArray[Any, int]] = Field( ..., description="""Sweep number of the PatchClampSeries in that row.""", json_schema_extra={ @@ -903,7 +903,7 @@ class SweepTable(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -940,7 +940,7 @@ class IntracellularElectrodesTable(DynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -998,7 +998,7 @@ class IntracellularStimuliTable(DynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1044,7 +1044,7 @@ class IntracellularResponsesTable(DynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1110,7 +1110,7 @@ class IntracellularRecordingsTable(AlignedDynamicTable): ..., description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1159,7 +1159,7 @@ class SimultaneousRecordingsTable(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1233,7 +1233,7 @@ class SequentialRecordingsTable(DynamicTable): } }, ) - stimulus_type: NDArray[Any, str] = Field( + stimulus_type: VectorData[NDArray[Any, str]] = Field( ..., description="""The type of stimulus used for the sequential recording.""", json_schema_extra={ @@ -1247,7 +1247,7 @@ class SequentialRecordingsTable(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1326,7 +1326,7 @@ class RepetitionsTable(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -1407,7 +1407,7 @@ class ExperimentalConditionsTable(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py index 2739d41..a30d3e0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py @@ -413,7 +413,7 @@ class DecompositionSeriesBands(DynamicTable): "bands", json_schema_extra={"linkml_meta": {"equals_string": "bands", "ifabsent": "string(bands)"}}, ) - band_name: NDArray[Any, str] = Field( + band_name: VectorData[NDArray[Any, str]] = Field( ..., description="""Name of the band, e.g. theta.""", json_schema_extra={ @@ -422,7 +422,7 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_limits: NDArray[Shape["* num_bands, 2 low_high"], float] = Field( + band_limits: VectorData[NDArray[Shape["* num_bands, 2 low_high"], float]] = Field( ..., description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""", json_schema_extra={ @@ -436,12 +436,12 @@ class DecompositionSeriesBands(DynamicTable): } }, ) - band_mean: NDArray[Shape["* num_bands"], float] = Field( + band_mean: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The mean Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, ) - band_stdev: NDArray[Shape["* num_bands"], float] = Field( + band_stdev: VectorData[NDArray[Shape["* num_bands"], float]] = Field( ..., description="""The standard deviation of Gaussian filters, in Hz.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_bands"}]}}}, @@ -451,7 +451,7 @@ class DecompositionSeriesBands(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -498,19 +498,21 @@ class Units(DynamicTable): } }, ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]] = Field( - None, - description="""Observation intervals for each unit.""", - json_schema_extra={ - "linkml_meta": { - "array": { - "dimensions": [ - {"alias": "num_intervals"}, - {"alias": "start_end", "exact_cardinality": 2}, - ] + obs_intervals: VectorData[Optional[NDArray[Shape["* num_intervals, 2 start_end"], float]]] = ( + Field( + None, + description="""Observation intervals for each unit.""", + json_schema_extra={ + "linkml_meta": { + "array": { + "dimensions": [ + {"alias": "num_intervals"}, + {"alias": "start_end", "exact_cardinality": 2}, + ] + } } - } - }, + }, + ) ) electrodes_index: Named[Optional[VectorIndex]] = Field( None, @@ -539,26 +541,32 @@ class Units(DynamicTable): electrode_group: Optional[List[ElectrodeGroup]] = Field( None, description="""Electrode group that each spike unit came from.""" ) - waveform_mean: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_mean: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform mean for each spike unit.""") - waveform_sd: Optional[ - Union[ - NDArray[Shape["* num_units, * num_samples"], float], - NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + waveform_sd: VectorData[ + Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] ] ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field( - None, - description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. 
When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", - json_schema_extra={ - "linkml_meta": { - "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} - } - }, + waveforms: VectorData[Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]]] = ( + Field( + None, + description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", + json_schema_extra={ + "linkml_meta": { + "array": {"dimensions": [{"alias": "num_waveforms"}, {"alias": "num_samples"}]} + } + }, + ) ) waveforms_index: Named[Optional[VectorIndex]] = Field( None, @@ -589,7 +597,7 @@ class Units(DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py index 04dcee9..e17fe42 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py @@ -479,7 +479,7 @@ class PlaneSegmentation(DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index 9d5b38f..4676d58 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -5,7 +5,20 @@ from enum import Enum import re import sys import pandas as pd -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + Generic, + Iterable, + Tuple, + TypeVar, + overload, +) from numpydantic import NDArray, Shape from pydantic import ( BaseModel, @@ -67,8 +80,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -76,7 +91,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -119,13 +134,13 @@ class VectorDataMixin(BaseModel): return len(self.value) -class VectorIndexMixin(BaseModel): +class VectorIndexMixin(BaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None target: Optional["VectorData"] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): @@ -406,20 +421,28 @@ class DynamicTableMixin(BaseModel): anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ if "colnames" not in model: - colnames = [ - k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the 
end colnames = [ k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) ] - model["colnames"].extend(colnames) + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = model["colnames"].copy() + colnames.extend( + [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) + ] + ) + model["colnames"] = colnames return model @model_validator(mode="after") @@ -569,6 +592,32 @@ class AlignedDynamicTableMixin(DynamicTableMixin): df.set_index((self.name, "id"), drop=True, inplace=True) return df + @model_validator(mode="before") + @classmethod + def create_categories(cls, model: Dict[str, Any]) -> Dict: + """ + Construct categories from arguments. + + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "categories" not in model: + categories = [ + k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index") + ] + model["categories"] = categories + else: + # add any columns not explicitly given an order at the end + categories = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["categories"] + ] + model["categories"].extend(categories) + return model + linkml_meta = LinkMLMeta( { @@ -698,7 +747,7 @@ class DynamicTable(DynamicTableMixin): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 825e522..5066800 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -5,7 +5,20 @@ from enum import Enum import re import sys import pandas as pd -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + Generic, + Iterable, + Tuple, + TypeVar, + overload, +) from numpydantic import NDArray, Shape from pydantic import ( BaseModel, @@ -67,8 +80,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -76,7 +91,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -119,13 +134,13 @@ class VectorDataMixin(BaseModel): 
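(The recurring change in these hunks, repeated once per hdmf-common version, parametrizes the mixins with a TypeVar bound to NDArray so that generated annotations like `VectorData[NDArray[Shape["* num_rows"], int]]` constrain the wrapped `value`. Below is a minimal standalone sketch of the same pydantic pattern, hedged: `MyVector` stands in for the generated class, and `list[int]` is used in place of an NDArray type so the sketch runs without numpydantic.)

```python
from typing import Generic, Optional, TypeVar

from pydantic import BaseModel, ValidationError

T = TypeVar("T")  # the generated code bounds this to numpydantic's NDArray

class MyVector(BaseModel, Generic[T]):  # stand-in for VectorDataMixin
    value: Optional[T] = None

# analogous to VectorData[NDArray[Shape["* num_rows"], int]]
IntVector = MyVector[list[int]]

print(IntVector(value=[1, 2, 3]).value)  # [1, 2, 3], validated against the type argument
try:
    IntVector(value=["a", "b"])
except ValidationError:
    print("rejected: value must match the parametrized type")
```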
return len(self.value) -class VectorIndexMixin(BaseModel): +class VectorIndexMixin(BaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None target: Optional["VectorData"] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): @@ -406,20 +421,28 @@ class DynamicTableMixin(BaseModel): anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ if "colnames" not in model: - colnames = [ - k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end colnames = [ k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) ] - model["colnames"].extend(colnames) + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = model["colnames"].copy() + colnames.extend( + [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) + ] + ) + model["colnames"] = colnames return model @model_validator(mode="after") @@ -569,6 +592,32 @@ class AlignedDynamicTableMixin(DynamicTableMixin): df.set_index((self.name, "id"), drop=True, inplace=True) return df + @model_validator(mode="before") + @classmethod + def create_categories(cls, model: Dict[str, Any]) -> Dict: + """ + Construct categories from arguments. + + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "categories" not in model: + categories = [ + k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index") + ] + model["categories"] = categories + else: + # add any columns not explicitly given an order at the end + categories = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["categories"] + ] + model["categories"].extend(categories) + return model + linkml_meta = LinkMLMeta( { @@ -698,7 +747,7 @@ class DynamicTable(DynamicTableMixin): description="""The names of the columns in this table. 
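(Across all of these versioned copies, the substantive change to the colnames validator is twofold: fields whose values are `VectorIndexMixin` instances are now excluded via `isinstance`, and an explicit `colnames` ordering is copied before unordered columns are appended. A hedged sketch of the resulting behavior follows, with plain dict/list stand-ins; `FakeIndex` and the `NON_COLUMN_FIELDS` set here are illustrative, not the real classes.)

```python
NON_COLUMN_FIELDS = {"name", "colnames", "description", "id"}  # illustrative subset

class FakeIndex:  # stand-in for VectorIndexMixin
    pass

def derive_colnames(model: dict) -> list[str]:
    def is_column(k: str) -> bool:
        return (
            k not in NON_COLUMN_FIELDS
            and not k.endswith("_index")           # index columns are not columns
            and not isinstance(model[k], FakeIndex)
        )

    if "colnames" not in model:
        return [k for k in model if is_column(k)]
    # keep the explicit order, appending any unordered columns at the end
    ordered = list(model["colnames"])
    ordered.extend(k for k in model if is_column(k) and k not in ordered)
    return ordered

table = {"name": "t", "a": [1], "a_index": [0], "b": FakeIndex(), "c": [2]}
assert derive_colnames(table) == ["a", "c"]
assert derive_colnames({**table, "colnames": ["c"]}) == ["c", "a"]
```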
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index e837810..02d0686 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -5,7 +5,20 @@ from enum import Enum import re import sys import pandas as pd -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + Generic, + Iterable, + Tuple, + TypeVar, + overload, +) from numpydantic import NDArray, Shape from pydantic import ( BaseModel, @@ -67,8 +80,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -76,7 +91,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -119,13 +134,13 @@ class VectorDataMixin(BaseModel): return len(self.value) -class VectorIndexMixin(BaseModel): +class VectorIndexMixin(BaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None target: Optional["VectorData"] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): @@ -406,20 +421,28 @@ class DynamicTableMixin(BaseModel): anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ if "colnames" not in model: - colnames = [ - k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end colnames = [ k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) ] - model["colnames"].extend(colnames) + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = model["colnames"].copy() + colnames.extend( + [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) + ] + ) + model["colnames"] = colnames return model @model_validator(mode="after") @@ -569,6 +592,32 @@ class AlignedDynamicTableMixin(DynamicTableMixin): df.set_index((self.name, "id"), drop=True, inplace=True) return df + @model_validator(mode="before") + @classmethod + def create_categories(cls, model: Dict[str, Any]) -> Dict: + """ + Construct 
categories from arguments. + + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "categories" not in model: + categories = [ + k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index") + ] + model["categories"] = categories + else: + # add any columns not explicitly given an order at the end + categories = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["categories"] + ] + model["categories"].extend(categories) + return model + linkml_meta = LinkMLMeta( { @@ -717,7 +766,7 @@ class DynamicTable(DynamicTableMixin): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py index 7c232b0..2d3ccc4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -6,7 +6,20 @@ import re import sys from ...hdmf_common.v1_2_0.hdmf_common_base import Data, Container import pandas as pd -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + Generic, + Iterable, + Tuple, + TypeVar, + overload, +) from numpydantic import NDArray, Shape from pydantic import ( BaseModel, @@ -68,8 +81,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -77,7 +92,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -120,13 +135,13 @@ class VectorDataMixin(BaseModel): return len(self.value) -class VectorIndexMixin(BaseModel): +class VectorIndexMixin(BaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None target: Optional["VectorData"] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): @@ -407,20 +422,28 @@ class DynamicTableMixin(BaseModel): anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ if "colnames" not in model: - colnames = [ - k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end colnames = [ k for k in model if k not in cls.NON_COLUMN_FIELDS and not 
k.endswith("_index") - and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) ] - model["colnames"].extend(colnames) + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = model["colnames"].copy() + colnames.extend( + [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) + ] + ) + model["colnames"] = colnames return model @model_validator(mode="after") @@ -570,6 +593,32 @@ class AlignedDynamicTableMixin(DynamicTableMixin): df.set_index((self.name, "id"), drop=True, inplace=True) return df + @model_validator(mode="before") + @classmethod + def create_categories(cls, model: Dict[str, Any]) -> Dict: + """ + Construct categories from arguments. + + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "categories" not in model: + categories = [ + k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index") + ] + model["categories"] = categories + else: + # add any columns not explicitly given an order at the end + categories = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["categories"] + ] + model["categories"].extend(categories) + return model + linkml_meta = LinkMLMeta( { @@ -711,7 +760,7 @@ class DynamicTable(DynamicTableMixin): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py index 9c4e909..df0d847 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -6,7 +6,20 @@ import re import sys from ...hdmf_common.v1_2_1.hdmf_common_base import Data, Container import pandas as pd -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + Generic, + Iterable, + Tuple, + TypeVar, + overload, +) from numpydantic import NDArray, Shape from pydantic import ( BaseModel, @@ -68,8 +81,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -77,7 +92,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -120,13 +135,13 @@ class VectorDataMixin(BaseModel): return len(self.value) -class 
VectorIndexMixin(BaseModel): +class VectorIndexMixin(BaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None target: Optional["VectorData"] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): @@ -407,20 +422,28 @@ class DynamicTableMixin(BaseModel): anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ if "colnames" not in model: - colnames = [ - k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end colnames = [ k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) ] - model["colnames"].extend(colnames) + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = model["colnames"].copy() + colnames.extend( + [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) + ] + ) + model["colnames"] = colnames return model @model_validator(mode="after") @@ -570,6 +593,32 @@ class AlignedDynamicTableMixin(DynamicTableMixin): df.set_index((self.name, "id"), drop=True, inplace=True) return df + @model_validator(mode="before") + @classmethod + def create_categories(cls, model: Dict[str, Any]) -> Dict: + """ + Construct categories from arguments. + + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "categories" not in model: + categories = [ + k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index") + ] + model["categories"] = categories + else: + # add any columns not explicitly given an order at the end + categories = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["categories"] + ] + model["categories"].extend(categories) + return model + linkml_meta = LinkMLMeta( { @@ -711,7 +760,7 @@ class DynamicTable(DynamicTableMixin): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py index f77e052..7d465e2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -6,7 +6,20 @@ import re import sys from ...hdmf_common.v1_3_0.hdmf_common_base import Data, Container import pandas as pd -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + Generic, + Iterable, + Tuple, + TypeVar, + overload, +) from numpydantic import NDArray, Shape from pydantic import ( BaseModel, @@ -68,8 +81,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -77,7 +92,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -120,13 +135,13 @@ class VectorDataMixin(BaseModel): return len(self.value) -class VectorIndexMixin(BaseModel): +class VectorIndexMixin(BaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None target: Optional["VectorData"] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): @@ -407,20 +422,28 @@ class DynamicTableMixin(BaseModel): anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ if "colnames" not in model: - colnames = [ - k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end colnames = [ k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) ] - model["colnames"].extend(colnames) + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = model["colnames"].copy() + colnames.extend( + [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) + ] + ) + model["colnames"] = colnames return model @model_validator(mode="after") @@ -570,6 +593,32 @@ class AlignedDynamicTableMixin(DynamicTableMixin): df.set_index((self.name, "id"), drop=True, inplace=True) return df + @model_validator(mode="before") + @classmethod + def create_categories(cls, model: 
Dict[str, Any]) -> Dict: + """ + Construct categories from arguments. + + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "categories" not in model: + categories = [ + k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index") + ] + model["categories"] = categories + else: + # add any columns not explicitly given an order at the end + categories = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["categories"] + ] + model["categories"].extend(categories) + return model + linkml_meta = LinkMLMeta( { @@ -711,7 +760,7 @@ class DynamicTable(DynamicTableMixin): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index 6514e14..47400e6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -6,7 +6,20 @@ import re import sys from ...hdmf_common.v1_4_0.hdmf_common_base import Data, Container import pandas as pd -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + Generic, + Iterable, + Tuple, + TypeVar, + overload, +) from numpydantic import NDArray, Shape from pydantic import ( BaseModel, @@ -68,8 +81,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -77,7 +92,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -120,13 +135,13 @@ class VectorDataMixin(BaseModel): return len(self.value) -class VectorIndexMixin(BaseModel): +class VectorIndexMixin(BaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None target: Optional["VectorData"] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): @@ -407,20 +422,28 @@ class DynamicTableMixin(BaseModel): anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ if "colnames" not in model: - colnames = [ - k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end colnames = [ k for k in model 
if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) ] - model["colnames"].extend(colnames) + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = model["colnames"].copy() + colnames.extend( + [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) + ] + ) + model["colnames"] = colnames return model @model_validator(mode="after") @@ -570,6 +593,32 @@ class AlignedDynamicTableMixin(DynamicTableMixin): df.set_index((self.name, "id"), drop=True, inplace=True) return df + @model_validator(mode="before") + @classmethod + def create_categories(cls, model: Dict[str, Any]) -> Dict: + """ + Construct categories from arguments. + + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "categories" not in model: + categories = [ + k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index") + ] + model["categories"] = categories + else: + # add any columns not explicitly given an order at the end + categories = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["categories"] + ] + model["categories"].extend(categories) + return model + linkml_meta = LinkMLMeta( { @@ -685,7 +734,7 @@ class DynamicTable(DynamicTableMixin): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index b64b835..92c5eeb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -6,7 +6,20 @@ import re import sys from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container import pandas as pd -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + Generic, + Iterable, + Tuple, + TypeVar, + overload, +) from pydantic import ( BaseModel, ConfigDict, @@ -68,8 +81,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -77,7 +92,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -120,13 +135,13 @@ class VectorDataMixin(BaseModel): return 
len(self.value) -class VectorIndexMixin(BaseModel): +class VectorIndexMixin(BaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None target: Optional["VectorData"] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): @@ -407,20 +422,28 @@ class DynamicTableMixin(BaseModel): anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ if "colnames" not in model: - colnames = [ - k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end colnames = [ k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) ] - model["colnames"].extend(colnames) + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = model["colnames"].copy() + colnames.extend( + [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) + ] + ) + model["colnames"] = colnames return model @model_validator(mode="after") @@ -570,6 +593,32 @@ class AlignedDynamicTableMixin(DynamicTableMixin): df.set_index((self.name, "id"), drop=True, inplace=True) return df + @model_validator(mode="before") + @classmethod + def create_categories(cls, model: Dict[str, Any]) -> Dict: + """ + Construct categories from arguments. + + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "categories" not in model: + categories = [ + k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index") + ] + model["categories"] = categories + else: + # add any columns not explicitly given an order at the end + categories = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["categories"] + ] + model["categories"].extend(categories) + return model + linkml_meta = LinkMLMeta( { @@ -685,7 +734,7 @@ class DynamicTable(DynamicTableMixin): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -713,7 +762,7 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 3593099..755b884 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -6,7 +6,20 @@ import re import sys from ...hdmf_common.v1_5_1.hdmf_common_base import Data, Container import pandas as pd -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + Generic, + Iterable, + Tuple, + TypeVar, + overload, +) from pydantic import ( BaseModel, ConfigDict, @@ -68,8 +81,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -77,7 +92,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -120,13 +135,13 @@ class VectorDataMixin(BaseModel): return len(self.value) -class VectorIndexMixin(BaseModel): +class VectorIndexMixin(BaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None target: Optional["VectorData"] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): @@ -407,20 +422,28 @@ class DynamicTableMixin(BaseModel): anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ if "colnames" not in model: - colnames = [ - k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end colnames = [ k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) ] - model["colnames"].extend(colnames) + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = model["colnames"].copy() + colnames.extend( + [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) + ] + ) + model["colnames"] = colnames return model @model_validator(mode="after") @@ -570,6 +593,32 @@ class AlignedDynamicTableMixin(DynamicTableMixin): df.set_index((self.name, "id"), drop=True, inplace=True) return df + @model_validator(mode="before") + @classmethod + def create_categories(cls, model: Dict[str, Any]) -> Dict: + 
""" + Construct categories from arguments. + + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "categories" not in model: + categories = [ + k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index") + ] + model["categories"] = categories + else: + # add any columns not explicitly given an order at the end + categories = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["categories"] + ] + model["categories"].extend(categories) + return model + linkml_meta = LinkMLMeta( { @@ -685,7 +734,7 @@ class DynamicTable(DynamicTableMixin): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -713,7 +762,7 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index b5d6b93..63d007d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -6,7 +6,20 @@ import re import sys from ...hdmf_common.v1_6_0.hdmf_common_base import Data, Container import pandas as pd -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + Generic, + Iterable, + Tuple, + TypeVar, + overload, +) from pydantic import ( BaseModel, ConfigDict, @@ -68,8 +81,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -77,7 +92,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -120,13 +135,13 @@ class VectorDataMixin(BaseModel): return len(self.value) -class VectorIndexMixin(BaseModel): +class VectorIndexMixin(BaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + 
value: Optional[T] = None target: Optional["VectorData"] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): @@ -407,20 +422,28 @@ class DynamicTableMixin(BaseModel): anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ if "colnames" not in model: - colnames = [ - k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end colnames = [ k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) ] - model["colnames"].extend(colnames) + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = model["colnames"].copy() + colnames.extend( + [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) + ] + ) + model["colnames"] = colnames return model @model_validator(mode="after") @@ -570,6 +593,32 @@ class AlignedDynamicTableMixin(DynamicTableMixin): df.set_index((self.name, "id"), drop=True, inplace=True) return df + @model_validator(mode="before") + @classmethod + def create_categories(cls, model: Dict[str, Any]) -> Dict: + """ + Construct categories from arguments. + + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "categories" not in model: + categories = [ + k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index") + ] + model["categories"] = categories + else: + # add any columns not explicitly given an order at the end + categories = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["categories"] + ] + model["categories"].extend(categories) + return model + linkml_meta = LinkMLMeta( { @@ -685,7 +734,7 @@ class DynamicTable(DynamicTableMixin): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -713,7 +762,7 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index 51820ad..4be797e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -6,7 +6,20 @@ import re import sys from ...hdmf_common.v1_7_0.hdmf_common_base import Data, Container import pandas as pd -from typing import Any, ClassVar, List, Literal, Dict, Optional, Union, Iterable, Tuple, overload +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + Generic, + Iterable, + Tuple, + TypeVar, + overload, +) from pydantic import ( BaseModel, ConfigDict, @@ -68,8 +81,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -77,7 +92,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -120,13 +135,13 @@ class VectorDataMixin(BaseModel): return len(self.value) -class VectorIndexMixin(BaseModel): +class VectorIndexMixin(BaseModel, Generic[T]): """ Mixin class to give VectorIndex indexing abilities """ # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None target: Optional["VectorData"] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): @@ -407,20 +422,28 @@ class DynamicTableMixin(BaseModel): anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ if "colnames" not in model: - colnames = [ - k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end colnames = [ k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) ] - model["colnames"].extend(colnames) + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = model["colnames"].copy() + colnames.extend( + [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) + ] + ) + model["colnames"] = colnames return model @model_validator(mode="after") @@ -570,6 +593,32 @@ class AlignedDynamicTableMixin(DynamicTableMixin): df.set_index((self.name, "id"), drop=True, inplace=True) return df + @model_validator(mode="before") + @classmethod + def create_categories(cls, model: Dict[str, Any]) -> Dict: + 
""" + Construct categories from arguments. + + the model dict is ordered after python3.6, so we can use that minus + anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order + """ + if "categories" not in model: + categories = [ + k for k in model if k not in cls.NON_CATEGORY_FIELDS and not k.endswith("_index") + ] + model["categories"] = categories + else: + # add any columns not explicitly given an order at the end + categories = [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["categories"] + ] + model["categories"].extend(categories) + return model + linkml_meta = LinkMLMeta( { @@ -685,7 +734,7 @@ class DynamicTable(DynamicTableMixin): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -713,7 +762,7 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index 6104fd5..c1b0a8d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -1,7 +1,25 @@ from __future__ import annotations -from ...hdmf_common.v1_8_0.hdmf_common_base import Data +from datetime import datetime, date +from decimal import Decimal +from enum import Enum +import re +import sys +from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container import pandas as pd -from typing import Any, ClassVar, List, Dict, Optional, Union, Iterable, Tuple, overload +from typing import ( + Any, + ClassVar, + List, + Literal, + Dict, + Optional, + Union, + Generic, + Iterable, + Tuple, + TypeVar, + overload, +) from pydantic import ( BaseModel, ConfigDict, @@ -63,8 +81,10 @@ class LinkMLMeta(RootModel): NUMPYDANTIC_VERSION = "1.2.1" +T = TypeVar("T", bound=NDArray) -class VectorDataMixin(BaseModel): + +class VectorDataMixin(BaseModel, Generic[T]): """ Mixin class to give VectorData indexing abilities """ @@ -72,7 +92,7 @@ class VectorDataMixin(BaseModel): _index: Optional["VectorIndex"] = None # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): if value is not None and "value" not in kwargs: @@ -115,13 +135,13 @@ class VectorDataMixin(BaseModel): return len(self.value) -class VectorIndexMixin(BaseModel): +class VectorIndexMixin(BaseModel, Generic[T]): 
""" Mixin class to give VectorIndex indexing abilities """ # redefined in `VectorData`, but included here for testing and type checking - value: Optional[NDArray] = None + value: Optional[T] = None target: Optional["VectorData"] = None def __init__(self, value: Optional[NDArray] = None, **kwargs): @@ -402,20 +422,28 @@ class DynamicTableMixin(BaseModel): anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ if "colnames" not in model: - colnames = [ - k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - ] - model["colnames"] = colnames - else: - # add any columns not explicitly given an order at the end colnames = [ k for k in model if k not in cls.NON_COLUMN_FIELDS and not k.endswith("_index") - and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) ] - model["colnames"].extend(colnames) + model["colnames"] = colnames + else: + # add any columns not explicitly given an order at the end + colnames = model["colnames"].copy() + colnames.extend( + [ + k + for k in model + if k not in cls.NON_COLUMN_FIELDS + and not k.endswith("_index") + and k not in model["colnames"] + and not isinstance(model[k], VectorIndexMixin) + ] + ) + model["colnames"] = colnames return model @model_validator(mode="after") @@ -706,7 +734,7 @@ class DynamicTable(DynamicTableMixin): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, @@ -734,7 +762,7 @@ class AlignedDynamicTable(AlignedDynamicTableMixin, DynamicTable): description="""The names of the columns in this table. This should be used to specify an order to the columns.""", ) description: str = Field(..., description="""Description of what is in this dynamic table.""") - id: NDArray[Shape["* num_rows"], int] = Field( + id: VectorData[NDArray[Shape["* num_rows"], int]] = Field( ..., description="""Array of unique identifiers for the rows of this dynamic table.""", json_schema_extra={"linkml_meta": {"array": {"dimensions": [{"alias": "num_rows"}]}}}, From 43ff3503dc98e366bf5f677d01a654dfcad7b779 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Tue, 13 Aug 2024 23:38:53 -0700 Subject: [PATCH 47/61] more texts. 
Fix coercion with ndarrays and vectordata, incl numpydantic 1.3.3 making ndarray types callable --- nwb_linkml/pdm.lock | 231 ++++++++------------ nwb_linkml/pyproject.toml | 2 +- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 43 ++-- nwb_linkml/tests/test_includes/test_hdmf.py | 130 ++++++++++- 4 files changed, 241 insertions(+), 165 deletions(-) diff --git a/nwb_linkml/pdm.lock b/nwb_linkml/pdm.lock index e3af7cb..3482eac 100644 --- a/nwb_linkml/pdm.lock +++ b/nwb_linkml/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "dev", "plot", "tests"] strategy = ["inherit_metadata"] lock_version = "4.5.0" -content_hash = "sha256:ed633a147948a9923f6b3a99690d5d8bad0b4b8c0d528abe62d132b05d1d9f39" +content_hash = "sha256:20f4f69a117fab515291f53ff39dfd1e13e3c8cbd399f1f7ce308cbea7deeaf0" [[metadata.targets]] requires_python = ">=3.10,<3.13" @@ -102,7 +102,7 @@ name = "blinker" version = "1.8.2" requires_python = ">=3.8" summary = "Fast, simple object-to-object and broadcast signaling" -groups = ["dev", "plot", "tests"] +groups = ["plot"] files = [ {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, @@ -219,7 +219,7 @@ name = "click" version = "8.1.7" requires_python = ">=3.7" summary = "Composable command line interface toolkit" -groups = ["default", "dev", "plot", "tests"] +groups = ["default", "dev", "plot"] dependencies = [ "colorama; platform_system == \"Windows\"", "importlib-metadata; python_version < \"3.8\"", @@ -242,81 +242,89 @@ files = [ [[package]] name = "coverage" -version = "6.5.0" -requires_python = ">=3.7" +version = "7.6.1" +requires_python = ">=3.8" summary = "Code coverage measurement for Python" groups = ["dev", "tests"] files = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = 
"coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [[package]] name = "coverage" -version = "6.5.0" +version = "7.6.1" extras = ["toml"] -requires_python = ">=3.7" +requires_python = ">=3.8" summary = "Code coverage measurement for Python" groups = ["dev", "tests"] dependencies = [ - "coverage==6.5.0", + "coverage==7.6.1", "tomli; python_full_version <= \"3.11.0a6\"", ] files = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, -] - -[[package]] -name = "coveralls" -version = "3.3.1" -requires_python = ">= 3.5" -summary = "Show coverage stats online via coveralls.io" -groups = ["dev", "tests"] -dependencies = [ - "coverage!=6.0.*,!=6.1,!=6.1.1,<7.0,>=4.1", - "docopt>=0.6.1", - "requests>=1.0.0", -] -files = [ - {file = "coveralls-3.3.1-py2.py3-none-any.whl", hash = "sha256:f42015f31d386b351d4226389b387ae173207058832fbf5c8ec4b40e27b16026"}, - {file = "coveralls-3.3.1.tar.gz", hash = "sha256:b32a8bb5d2df585207c119d6c01567b81fba690c9c10a753bfe27a335bfc43ea"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [[package]] @@ -340,7 +348,7 @@ name = "dash" version = "2.17.1" requires_python = ">=3.8" summary = "A Python framework for building reactive web-apps. Developed by Plotly." -groups = ["dev", "plot", "tests"] +groups = ["plot"] dependencies = [ "Flask<3.1,>=1.0.4", "Werkzeug<3.1", @@ -364,7 +372,7 @@ files = [ name = "dash-core-components" version = "2.0.0" summary = "Core component suite for Dash" -groups = ["dev", "plot", "tests"] +groups = ["plot"] files = [ {file = "dash_core_components-2.0.0-py3-none-any.whl", hash = "sha256:52b8e8cce13b18d0802ee3acbc5e888cb1248a04968f962d63d070400af2e346"}, {file = "dash_core_components-2.0.0.tar.gz", hash = "sha256:c6733874af975e552f95a1398a16c2ee7df14ce43fa60bb3718a3c6e0b63ffee"}, @@ -374,7 +382,7 @@ files = [ name = "dash-cytoscape" version = "0.3.0" summary = "A Component Library for Dash aimed at facilitating network visualization in Python, wrapped around Cytoscape.js" -groups = ["dev", "plot", "tests"] +groups = ["plot"] dependencies = [ "dash", ] @@ -387,7 +395,7 @@ files = [ name = "dash-html-components" version = "2.0.0" summary = "Vanilla HTML components for Dash" -groups = ["dev", "plot", "tests"] +groups = ["plot"] files = [ {file = "dash_html_components-2.0.0-py3-none-any.whl", hash = "sha256:b42cc903713c9706af03b3f2548bda4be7307a7cf89b7d6eae3da872717d1b63"}, {file = "dash_html_components-2.0.0.tar.gz", hash = "sha256:8703a601080f02619a6390998e0b3da4a5daabe97a1fd7a9cebc09d015f26e50"}, @@ -397,7 +405,7 @@ files = [ name = "dash-table" version = "5.0.0" summary = "Dash table" -groups = ["dev", "plot", "tests"] +groups = ["plot"] files = [ {file = "dash_table-5.0.0-py3-none-any.whl", hash = "sha256:19036fa352bb1c11baf38068ec62d172f0515f73ca3276c79dee49b95ddc16c9"}, {file = "dash_table-5.0.0.tar.gz", hash = "sha256:18624d693d4c8ef2ddec99a6f167593437a7ea0bf153aa20f318c170c5bc7308"}, @@ -417,15 +425,6 @@ files = [ {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, ] -[[package]] -name = "docopt" -version = "0.6.2" -summary = "Pythonic argument 
parser, that will make you smile" -groups = ["dev", "tests"] -files = [ - {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, -] - [[package]] name = "et-xmlfile" version = "1.1.0" @@ -454,7 +453,7 @@ name = "flask" version = "3.0.3" requires_python = ">=3.8" summary = "A simple framework for building complex web applications." -groups = ["dev", "plot", "tests"] +groups = ["plot"] dependencies = [ "Jinja2>=3.1.2", "Werkzeug>=3.0.0", @@ -496,17 +495,6 @@ files = [ {file = "future_fstrings-1.2.0.tar.gz", hash = "sha256:6cf41cbe97c398ab5a81168ce0dbb8ad95862d3caf23c21e4430627b90844089"}, ] -[[package]] -name = "gprof2dot" -version = "2024.6.6" -requires_python = ">=3.8" -summary = "Generate a dot graph from the output of several profilers." -groups = ["dev", "tests"] -files = [ - {file = "gprof2dot-2024.6.6-py2.py3-none-any.whl", hash = "sha256:45b14ad7ce64e299c8f526881007b9eb2c6b75505d5613e96e66ee4d5ab33696"}, - {file = "gprof2dot-2024.6.6.tar.gz", hash = "sha256:fa1420c60025a9eb7734f65225b4da02a10fc6dd741b37fa129bc6b41951e5ab"}, -] - [[package]] name = "graphviz" version = "0.20.3" @@ -608,7 +596,7 @@ name = "importlib-metadata" version = "8.2.0" requires_python = ">=3.8" summary = "Read metadata from Python packages" -groups = ["dev", "plot", "tests"] +groups = ["plot"] dependencies = [ "typing-extensions>=3.6.4; python_version < \"3.8\"", "zipp>=0.5", @@ -661,7 +649,7 @@ name = "itsdangerous" version = "2.2.0" requires_python = ">=3.8" summary = "Safely pass data to untrusted environments and back." -groups = ["dev", "plot", "tests"] +groups = ["plot"] files = [ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, @@ -672,7 +660,7 @@ name = "jinja2" version = "3.1.4" requires_python = ">=3.7" summary = "A very fast and expressive template engine." -groups = ["default", "dev", "plot", "tests"] +groups = ["default", "plot"] dependencies = [ "MarkupSafe>=2.0", ] @@ -915,7 +903,7 @@ name = "markupsafe" version = "2.1.5" requires_python = ">=3.7" summary = "Safely add untrusted strings to HTML/XML markup." 
-groups = ["default", "dev", "plot", "tests"] +groups = ["default", "plot"] files = [ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, @@ -977,7 +965,7 @@ name = "nest-asyncio" version = "1.6.0" requires_python = ">=3.5" summary = "Patch asyncio to allow nested event loops" -groups = ["dev", "plot", "tests"] +groups = ["plot"] files = [ {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, @@ -1036,7 +1024,7 @@ files = [ [[package]] name = "numpydantic" -version = "1.3.1" +version = "1.3.3" requires_python = "<4.0,>=3.9" summary = "Type and shape validation and serialization for numpy arrays in pydantic models" groups = ["default"] @@ -1046,8 +1034,8 @@ dependencies = [ "typing-extensions>=4.11.0; python_version < \"3.11\"", ] files = [ - {file = "numpydantic-1.3.1-py3-none-any.whl", hash = "sha256:c0a37c093fcd0e4ed52c4556f4e804eec76fcf924c546e475509e662336f9f61"}, - {file = "numpydantic-1.3.1.tar.gz", hash = "sha256:d61868d7912f2dfee9906bd989399d74f470dee10d5028409c2f5d39529fc4af"}, + {file = "numpydantic-1.3.3-py3-none-any.whl", hash = "sha256:e002767252b1b77abb7715834ab7cbf58964baddae44863710f09e71b23287e4"}, + {file = "numpydantic-1.3.3.tar.gz", hash = "sha256:1cc2744f7b5fbcecd51a64fafaf8c9a564bb296336a566a16be97ba7b1c28698"}, ] [[package]] @@ -1166,7 +1154,7 @@ name = "plotly" version = "5.23.0" requires_python = ">=3.8" summary = "An open-source, interactive data visualization library for Python" -groups = ["dev", "plot", "tests"] +groups = ["plot"] dependencies = [ "packaging", "tenacity>=6.2.0", @@ -1458,35 +1446,6 @@ files = [ {file = "pytest-logging-2015.11.4.tar.gz", hash = "sha256:cec5c85ecf18aab7b2ead5498a31b9f758680ef5a902b9054ab3f2bdbb77c896"}, ] -[[package]] -name = "pytest-md" -version = "0.2.0" -requires_python = ">=3.6" -summary = "Plugin for generating Markdown reports for pytest results" -groups = ["dev", "tests"] -dependencies = [ - "pytest>=4.2.1", -] -files = [ - {file = "pytest-md-0.2.0.tar.gz", hash = "sha256:3b248d5b360ea5198e05b4f49c7442234812809a63137ec6cdd3643a40cf0112"}, - {file = "pytest_md-0.2.0-py3-none-any.whl", hash = "sha256:4c4cd16fea6d1485e87ee254558712c804a96d2aa9674b780e7eb8fb6526e1d1"}, -] - -[[package]] -name = "pytest-profiling" -version = "1.7.0" -summary = "Profiling plugin for py.test" -groups = ["dev", "tests"] -dependencies = [ - "gprof2dot", - "pytest", - "six", -] -files = [ - {file = "pytest-profiling-1.7.0.tar.gz", hash = "sha256:93938f147662225d2b8bd5af89587b979652426a8a6ffd7e73ec4a23e24b7f29"}, - {file = "pytest_profiling-1.7.0-py2.py3-none-any.whl", hash = "sha256:999cc9ac94f2e528e3f5d43465da277429984a1c237ae9818f8cfd0b06acb019"}, -] - [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -1670,7 +1629,7 @@ files = [ name = "retrying" version = "1.3.4" summary = "Retrying" -groups = ["dev", "plot", "tests"] +groups = ["plot"] dependencies = [ "six>=1.7.0", ] @@ -1858,13 +1817,13 @@ files = [ [[package]] name = "setuptools" -version = "72.1.0" +version = "72.2.0" requires_python = ">=3.8" summary = "Easily download, build, install, upgrade, and uninstall Python packages" -groups = ["dev", 
"plot", "tests"] +groups = ["plot"] files = [ - {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, - {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, + {file = "setuptools-72.2.0-py3-none-any.whl", hash = "sha256:f11dd94b7bae3a156a95ec151f24e4637fb4fa19c878e4d191bfb8b2d82728c4"}, + {file = "setuptools-72.2.0.tar.gz", hash = "sha256:80aacbf633704e9c8bfa1d99fa5dd4dc59573efcf9e4042c13d3bcef91ac2ef9"}, ] [[package]] @@ -1987,7 +1946,7 @@ name = "tenacity" version = "9.0.0" requires_python = ">=3.8" summary = "Retry code until it succeeds" -groups = ["dev", "plot", "tests"] +groups = ["plot"] files = [ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, @@ -2135,7 +2094,7 @@ name = "werkzeug" version = "3.0.3" requires_python = ">=3.8" summary = "The comprehensive WSGI web application library." -groups = ["dev", "plot", "tests"] +groups = ["plot"] dependencies = [ "MarkupSafe>=2.1.1", ] @@ -2190,7 +2149,7 @@ name = "zipp" version = "3.20.0" requires_python = ">=3.8" summary = "Backport of pathlib-compatible object wrapper for zip files" -groups = ["dev", "plot", "tests"] +groups = ["plot"] files = [ {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, diff --git a/nwb_linkml/pyproject.toml b/nwb_linkml/pyproject.toml index ffe0f54..f149733 100644 --- a/nwb_linkml/pyproject.toml +++ b/nwb_linkml/pyproject.toml @@ -20,7 +20,7 @@ dependencies = [ "pydantic-settings>=2.0.3", "tqdm>=4.66.1", 'typing-extensions>=4.12.2;python_version<"3.11"', - "numpydantic>=1.3.1", + "numpydantic>=1.3.3", "black>=24.4.2", "pandas>=2.2.2", ] diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index e4b9ff1..6c0c8bc 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -49,7 +49,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] + __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -249,8 +249,9 @@ class DynamicTableMixin(BaseModel): model["colnames"] = colnames return model - @model_validator(mode="after") - def cast_extra_columns(self) -> "DynamicTableMixin": + @model_validator(mode="before") + @classmethod + def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict: """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
@@ -258,22 +259,20 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in self.__pydantic_extra__.items(): + for key, val in model.items(): + if key in cls.model_fields: + continue if not isinstance(val, (VectorData, VectorIndex)): try: if key.endswith("_index"): - self.__pydantic_extra__[key] = VectorIndex( - name=key, description="", value=val - ) + model[key] = VectorIndex(name=key, description="", value=val) else: - self.__pydantic_extra__[key] = VectorData( - name=key, description="", value=val - ) + model[key] = VectorData(name=key, description="", value=val) except ValidationError as e: raise ValidationError( f"field {key} cannot be cast to VectorData from {val}" ) from e - return self + return model @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": @@ -285,6 +284,8 @@ class DynamicTableMixin(BaseModel): # find an index idx = None for field_name in self.model_fields_set: + if field_name in self.NON_COLUMN_FIELDS or field_name == key: + continue # implicit name-based index field = getattr(self, field_name) if isinstance(field, VectorIndex) and ( @@ -323,17 +324,23 @@ class DynamicTableMixin(BaseModel): """ try: return handler(val) - except ValidationError: + except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation if type(annotation).__name__ == "_UnionGenericAlias": annotation = annotation.__args__[0] - return handler( - annotation( - val, - name=info.field_name, - description=cls.model_fields[info.field_name].description, + try: + # should pass if we're supposed to be a VectorData column + # don't want to override intention here by insisting that it is + # *actually* a VectorData column in case an NDArray has been specified for now + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) ) - ) + except Exception: + raise e class VectorDataMixin(BaseModel, Generic[T]): diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index bde829b..c5a164f 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -1,6 +1,8 @@ import numpy as np import pandas as pd +import pytest from numpydantic import NDArray, Shape +from pydantic import ValidationError from nwb_linkml.includes import hdmf from nwb_linkml.includes.hdmf import DynamicTableMixin, VectorDataMixin, VectorIndexMixin @@ -289,17 +291,125 @@ def test_dynamictable_mixin_getattr(): """ class MyDT(DynamicTableMixin): - existing_col: NDArray[Shape["* col"], int] - - class AModel(DynamicTableMixin): - col: hdmf.VectorData[NDArray[Shape["3, 3"], int]] + existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]] col = hdmf.VectorData(value=np.arange(10)) inst = MyDT(existing_col=col) - # regular lookup for attrs that exist - # pdb.set_trace() - # inst.existing_col - # assert inst.existing_col == col - # df lookup otherwise - # inst.columns + # regular lookup for attrs that exist + assert isinstance(inst.existing_col, hdmf.VectorData) + assert all(inst.existing_col.value == col.value) + + # df lookup for those that don't + assert isinstance(inst.columns, pd.Index) + + +def test_dynamictable_coercion(): + """ + Dynamictable should coerce arrays into vectordata objects for known and unknown cols + """ + + class MyDT(DynamicTableMixin): + existing_col:
hdmf.VectorData[NDArray[Shape["* col"], int]] + + cols = { + "existing_col": np.arange(10), + "new_col_1": np.arange(10), + } + inst = MyDT(**cols) + assert isinstance(inst.existing_col, hdmf.VectorData) + assert isinstance(inst.new_col_1, hdmf.VectorData) + assert all(inst.existing_col.value == np.arange(10)) + assert all(inst.new_col_1.value == np.arange(10)) + + +def test_dynamictable_create_id(): + class MyDT(DynamicTableMixin): + existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]] + + cols = { + "existing_col": np.arange(10), + } + inst = MyDT(**cols) + + assert all(inst.id == np.arange(10)) + + +def test_dynamictable_resolve_index(): + """ + Dynamictable should resolve and connect data to indices, explicit and implicit + """ + + class MyDT(DynamicTableMixin): + existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]] + + cols = { + "existing_col": np.arange(10), + "new_col_1": hdmf.VectorData(value=np.arange(10)), + "new_col_2": hdmf.VectorData(value=np.arange(10)), + } + # explicit index with mismatching name + cols["weirdname_index"] = hdmf.VectorIndex(value=np.arange(10), target=cols["new_col_1"]) + # implicit index with matching name + cols["new_col_2_index"] = hdmf.VectorIndex(value=np.arange(10)) + + inst = MyDT(**cols) + assert inst.weirdname_index.target is inst.new_col_1 + assert inst.new_col_2_index.target is inst.new_col_2 + assert inst.new_col_1._index is inst.weirdname_index + assert inst.new_col_2._index is inst.new_col_2_index + + +def test_dynamictable_assert_equal_length(): + """ + Dynamictable validates that columns are of equal length + """ + + class MyDT(DynamicTableMixin): + existing_col: NDArray[Shape["* col"], int] + + cols = { + "existing_col": np.arange(10), + "new_col_1": hdmf.VectorData(value=np.arange(11)), + } + with pytest.raises(ValidationError, match="Columns are not of equal length"): + _ = MyDT(**cols) + + cols = { + "existing_col": np.arange(11), + "new_col_1": hdmf.VectorData(value=np.arange(10)), + } + with pytest.raises(ValidationError, match="Columns are not of equal length"): + _ = MyDT(**cols) + + # wrong lengths are fine as long as the index is good + cols = { + "existing_col": np.arange(10), + "new_col_1": hdmf.VectorData(value=np.arange(100)), + "new_col_1_index": hdmf.VectorIndex(value=np.arange(0, 100, 10) + 10), + } + _ = MyDT(**cols) + + # but not fine if the index is not good + cols = { + "existing_col": np.arange(10), + "new_col_1": hdmf.VectorData(value=np.arange(100)), + "new_col_1_index": hdmf.VectorIndex(value=np.arange(0, 100, 5) + 5), + } + with pytest.raises(ValidationError, match="Columns are not of equal length"): + _ = MyDT(**cols) + + +def test_vectordata_generic_numpydantic_validation(): + """ + Using VectorData as a generic with a numpydantic array annotation should still validate + + Simple test here because numpydantic validation is tested in numpydantic itself, + we just want to check that the annotations work as validation and don't just accept anything + """ + + class MyDT(DynamicTableMixin): + existing_col: NDArray[Shape["3 col"], int] + + with pytest.raises(ValidationError): + _ = MyDT(existing_col=np.zeros((4, 5, 6), dtype=int)) From d61d1ecf2272d3a883b8bd6b5b29c5830e836917 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Tue, 13 Aug 2024 23:39:11 -0700 Subject: [PATCH 48/61] regenerate models --- .../hdmf_common/v1_1_0/hdmf_common_table.py | 43 +++++++++++-------- .../hdmf_common/v1_1_2/hdmf_common_table.py | 43 +++++++++++-------- .../hdmf_common/v1_1_3/hdmf_common_table.py | 43 +++++++++++--------
.../hdmf_common/v1_2_0/hdmf_common_table.py | 43 +++++++++++-------- .../hdmf_common/v1_2_1/hdmf_common_table.py | 43 +++++++++++-------- .../hdmf_common/v1_3_0/hdmf_common_table.py | 43 +++++++++++-------- .../hdmf_common/v1_4_0/hdmf_common_table.py | 43 +++++++++++-------- .../hdmf_common/v1_5_0/hdmf_common_table.py | 43 +++++++++++-------- .../hdmf_common/v1_5_1/hdmf_common_table.py | 43 +++++++++++-------- .../hdmf_common/v1_6_0/hdmf_common_table.py | 43 +++++++++++-------- .../hdmf_common/v1_7_0/hdmf_common_table.py | 43 +++++++++++-------- .../hdmf_common/v1_8_0/hdmf_common_table.py | 43 +++++++++++-------- 12 files changed, 300 insertions(+), 216 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index 4676d58..e37cea0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -245,7 +245,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] + __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -445,8 +445,9 @@ class DynamicTableMixin(BaseModel): model["colnames"] = colnames return model - @model_validator(mode="after") - def cast_extra_columns(self) -> "DynamicTableMixin": + @model_validator(mode="before") + @classmethod + def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict: """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
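Every regenerated version module carries the same index-resolution logic; a minimal sketch of the explicit and implicit pairing it performs, mirroring test_dynamictable_resolve_index above (the column names are illustrative, not part of the API):

import numpy as np
from numpydantic import NDArray, Shape
from nwb_linkml.includes import hdmf
from nwb_linkml.includes.hdmf import DynamicTableMixin

class MyDT(DynamicTableMixin):
    existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]]

cols = {
    "existing_col": np.arange(10),
    "new_col_1": hdmf.VectorData(value=np.arange(10)),
    "new_col_2": hdmf.VectorData(value=np.arange(10)),
}
# explicit index: the target is passed directly, so the name need not match
cols["weirdname_index"] = hdmf.VectorIndex(value=np.arange(10), target=cols["new_col_1"])
# implicit index: resolved to new_col_2 purely by the "_index" naming convention
cols["new_col_2_index"] = hdmf.VectorIndex(value=np.arange(10))

inst = MyDT(**cols)
assert inst.weirdname_index.target is inst.new_col_1
assert inst.new_col_2_index.target is inst.new_col_2
assert inst.new_col_2._index is inst.new_col_2_index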
@@ -454,22 +455,20 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in self.__pydantic_extra__.items(): + for key, val in model.items(): + if key in cls.model_fields: + continue if not isinstance(val, (VectorData, VectorIndex)): try: if key.endswith("_index"): - self.__pydantic_extra__[key] = VectorIndex( - name=key, description="", value=val - ) + model[key] = VectorIndex(name=key, description="", value=val) else: - self.__pydantic_extra__[key] = VectorData( - name=key, description="", value=val - ) + model[key] = VectorData(name=key, description="", value=val) except ValidationError as e: raise ValidationError( f"field {key} cannot be cast to VectorData from {val}" ) from e - return self + return model @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": @@ -481,6 +480,8 @@ class DynamicTableMixin(BaseModel): # find an index idx = None for field_name in self.model_fields_set: + if field_name in self.NON_COLUMN_FIELDS or field_name == key: + continue # implicit name-based index field = getattr(self, field_name) if isinstance(field, VectorIndex) and ( @@ -519,17 +520,23 @@ class DynamicTableMixin(BaseModel): """ try: return handler(val) - except ValidationError: + except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation if type(annotation).__name__ == "_UnionGenericAlias": annotation = annotation.__args__[0] - return handler( - annotation( - val, - name=info.field_name, - description=cls.model_fields[info.field_name].description, + try: + # should pass if we're supposed to be a VectorData column + # don't want to override intention here by insisting that it is + # *actually* a VectorData column in case an NDArray has been specified for now + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) ) - ) + except Exception: + raise e class AlignedDynamicTableMixin(DynamicTableMixin): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 5066800..128e62f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -245,7 +245,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] + __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -445,8 +445,9 @@ class DynamicTableMixin(BaseModel): model["colnames"] = colnames return model - @model_validator(mode="after") - def cast_extra_columns(self) -> "DynamicTableMixin": + @model_validator(mode="before") + @classmethod + def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict: """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
@@ -454,22 +455,20 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in self.__pydantic_extra__.items(): + for key, val in model.items(): + if key in cls.model_fields: + continue if not isinstance(val, (VectorData, VectorIndex)): try: if key.endswith("_index"): - self.__pydantic_extra__[key] = VectorIndex( - name=key, description="", value=val - ) + model[key] = VectorIndex(name=key, description="", value=val) else: - self.__pydantic_extra__[key] = VectorData( - name=key, description="", value=val - ) + model[key] = VectorData(name=key, description="", value=val) except ValidationError as e: raise ValidationError( f"field {key} cannot be cast to VectorData from {val}" ) from e - return self + return model @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": @@ -481,6 +480,8 @@ class DynamicTableMixin(BaseModel): # find an index idx = None for field_name in self.model_fields_set: + if field_name in self.NON_COLUMN_FIELDS or field_name == key: + continue # implicit name-based index field = getattr(self, field_name) if isinstance(field, VectorIndex) and ( @@ -519,17 +520,23 @@ class DynamicTableMixin(BaseModel): """ try: return handler(val) - except ValidationError: + except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation if type(annotation).__name__ == "_UnionGenericAlias": annotation = annotation.__args__[0] - return handler( - annotation( - val, - name=info.field_name, - description=cls.model_fields[info.field_name].description, + try: + # should pass if we're supposed to be a VectorData column + # don't want to override intention here by insisting that it is + # *actually* a VectorData column in case an NDArray has been specified for now + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) ) - ) + except Exception: + raise e class AlignedDynamicTableMixin(DynamicTableMixin): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index 02d0686..12b84b1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -245,7 +245,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] + __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -445,8 +445,9 @@ class DynamicTableMixin(BaseModel): model["colnames"] = colnames return model - @model_validator(mode="after") - def cast_extra_columns(self) -> "DynamicTableMixin": + @model_validator(mode="before") + @classmethod + def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict: """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
@@ -454,22 +455,20 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in self.__pydantic_extra__.items(): + for key, val in model.items(): + if key in cls.model_fields: + continue if not isinstance(val, (VectorData, VectorIndex)): try: if key.endswith("_index"): - self.__pydantic_extra__[key] = VectorIndex( - name=key, description="", value=val - ) + model[key] = VectorIndex(name=key, description="", value=val) else: - self.__pydantic_extra__[key] = VectorData( - name=key, description="", value=val - ) + model[key] = VectorData(name=key, description="", value=val) except ValidationError as e: raise ValidationError( f"field {key} cannot be cast to VectorData from {val}" ) from e - return self + return model @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": @@ -481,6 +480,8 @@ class DynamicTableMixin(BaseModel): # find an index idx = None for field_name in self.model_fields_set: + if field_name in self.NON_COLUMN_FIELDS or field_name == key: + continue # implicit name-based index field = getattr(self, field_name) if isinstance(field, VectorIndex) and ( @@ -519,17 +520,23 @@ class DynamicTableMixin(BaseModel): """ try: return handler(val) - except ValidationError: + except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation if type(annotation).__name__ == "_UnionGenericAlias": annotation = annotation.__args__[0] - return handler( - annotation( - val, - name=info.field_name, - description=cls.model_fields[info.field_name].description, + try: + # should pass if we're supposed to be a VectorData column + # don't want to override intention here by insisting that it is + # *actually* a VectorData column in case an NDArray has been specified for now + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) ) - ) + except Exception: + raise e class AlignedDynamicTableMixin(DynamicTableMixin): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py index 2d3ccc4..9c99479 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py @@ -246,7 +246,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] + __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -446,8 +446,9 @@ class DynamicTableMixin(BaseModel): model["colnames"] = colnames return model - @model_validator(mode="after") - def cast_extra_columns(self) -> "DynamicTableMixin": + @model_validator(mode="before") + @classmethod + def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict: """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
@@ -455,22 +456,20 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in self.__pydantic_extra__.items(): + for key, val in model.items(): + if key in cls.model_fields: + continue if not isinstance(val, (VectorData, VectorIndex)): try: if key.endswith("_index"): - self.__pydantic_extra__[key] = VectorIndex( - name=key, description="", value=val - ) + model[key] = VectorIndex(name=key, description="", value=val) else: - self.__pydantic_extra__[key] = VectorData( - name=key, description="", value=val - ) + model[key] = VectorData(name=key, description="", value=val) except ValidationError as e: raise ValidationError( f"field {key} cannot be cast to VectorData from {val}" ) from e - return self + return model @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": @@ -482,6 +481,8 @@ class DynamicTableMixin(BaseModel): # find an index idx = None for field_name in self.model_fields_set: + if field_name in self.NON_COLUMN_FIELDS or field_name == key: + continue # implicit name-based index field = getattr(self, field_name) if isinstance(field, VectorIndex) and ( @@ -520,17 +521,23 @@ class DynamicTableMixin(BaseModel): """ try: return handler(val) - except ValidationError: + except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation if type(annotation).__name__ == "_UnionGenericAlias": annotation = annotation.__args__[0] - return handler( - annotation( - val, - name=info.field_name, - description=cls.model_fields[info.field_name].description, + try: + # should pass if we're supposed to be a VectorData column + # don't want to override intention here by insisting that it is + # *actually* a VectorData column in case an NDArray has been specified for now + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) ) - ) + except Exception: + raise e class AlignedDynamicTableMixin(DynamicTableMixin): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py index df0d847..b487609 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py @@ -246,7 +246,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] + __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -446,8 +446,9 @@ class DynamicTableMixin(BaseModel): model["colnames"] = colnames return model - @model_validator(mode="after") - def cast_extra_columns(self) -> "DynamicTableMixin": + @model_validator(mode="before") + @classmethod + def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict: """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
@@ -455,22 +456,20 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in self.__pydantic_extra__.items(): + for key, val in model.items(): + if key in cls.model_fields: + continue if not isinstance(val, (VectorData, VectorIndex)): try: if key.endswith("_index"): - self.__pydantic_extra__[key] = VectorIndex( - name=key, description="", value=val - ) + model[key] = VectorIndex(name=key, description="", value=val) else: - self.__pydantic_extra__[key] = VectorData( - name=key, description="", value=val - ) + model[key] = VectorData(name=key, description="", value=val) except ValidationError as e: raise ValidationError( f"field {key} cannot be cast to VectorData from {val}" ) from e - return self + return model @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": @@ -482,6 +481,8 @@ class DynamicTableMixin(BaseModel): # find an index idx = None for field_name in self.model_fields_set: + if field_name in self.NON_COLUMN_FIELDS or field_name == key: + continue # implicit name-based index field = getattr(self, field_name) if isinstance(field, VectorIndex) and ( @@ -520,17 +521,23 @@ class DynamicTableMixin(BaseModel): """ try: return handler(val) - except ValidationError: + except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation if type(annotation).__name__ == "_UnionGenericAlias": annotation = annotation.__args__[0] - return handler( - annotation( - val, - name=info.field_name, - description=cls.model_fields[info.field_name].description, + try: + # should pass if we're supposed to be a VectorData column + # don't want to override intention here by insisting that it is + # *actually* a VectorData column in case an NDArray has been specified for now + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) ) - ) + except Exception: + raise e class AlignedDynamicTableMixin(DynamicTableMixin): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py index 7d465e2..14214b5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py @@ -246,7 +246,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] + __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -446,8 +446,9 @@ class DynamicTableMixin(BaseModel): model["colnames"] = colnames return model - @model_validator(mode="after") - def cast_extra_columns(self) -> "DynamicTableMixin": + @model_validator(mode="before") + @classmethod + def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict: """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
@@ -455,22 +456,20 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in self.__pydantic_extra__.items(): + for key, val in model.items(): + if key in cls.model_fields: + continue if not isinstance(val, (VectorData, VectorIndex)): try: if key.endswith("_index"): - self.__pydantic_extra__[key] = VectorIndex( - name=key, description="", value=val - ) + model[key] = VectorIndex(name=key, description="", value=val) else: - self.__pydantic_extra__[key] = VectorData( - name=key, description="", value=val - ) + model[key] = VectorData(name=key, description="", value=val) except ValidationError as e: raise ValidationError( f"field {key} cannot be cast to VectorData from {val}" ) from e - return self + return model @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": @@ -482,6 +481,8 @@ class DynamicTableMixin(BaseModel): # find an index idx = None for field_name in self.model_fields_set: + if field_name in self.NON_COLUMN_FIELDS or field_name == key: + continue # implicit name-based index field = getattr(self, field_name) if isinstance(field, VectorIndex) and ( @@ -520,17 +521,23 @@ class DynamicTableMixin(BaseModel): """ try: return handler(val) - except ValidationError: + except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation if type(annotation).__name__ == "_UnionGenericAlias": annotation = annotation.__args__[0] - return handler( - annotation( - val, - name=info.field_name, - description=cls.model_fields[info.field_name].description, + try: + # should pass if we're supposed to be a VectorData column + # don't want to override intention here by insisting that it is + # *actually* a VectorData column in case an NDArray has been specified for now + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) ) - ) + except Exception: + raise e class AlignedDynamicTableMixin(DynamicTableMixin): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py index 47400e6..f79660e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py @@ -246,7 +246,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] + __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -446,8 +446,9 @@ class DynamicTableMixin(BaseModel): model["colnames"] = colnames return model - @model_validator(mode="after") - def cast_extra_columns(self) -> "DynamicTableMixin": + @model_validator(mode="before") + @classmethod + def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict: """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
@@ -455,22 +456,20 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in self.__pydantic_extra__.items(): + for key, val in model.items(): + if key in cls.model_fields: + continue if not isinstance(val, (VectorData, VectorIndex)): try: if key.endswith("_index"): - self.__pydantic_extra__[key] = VectorIndex( - name=key, description="", value=val - ) + model[key] = VectorIndex(name=key, description="", value=val) else: - self.__pydantic_extra__[key] = VectorData( - name=key, description="", value=val - ) + model[key] = VectorData(name=key, description="", value=val) except ValidationError as e: raise ValidationError( f"field {key} cannot be cast to VectorData from {val}" ) from e - return self + return model @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": @@ -482,6 +481,8 @@ class DynamicTableMixin(BaseModel): # find an index idx = None for field_name in self.model_fields_set: + if field_name in self.NON_COLUMN_FIELDS or field_name == key: + continue # implicit name-based index field = getattr(self, field_name) if isinstance(field, VectorIndex) and ( @@ -520,17 +521,23 @@ class DynamicTableMixin(BaseModel): """ try: return handler(val) - except ValidationError: + except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation if type(annotation).__name__ == "_UnionGenericAlias": annotation = annotation.__args__[0] - return handler( - annotation( - val, - name=info.field_name, - description=cls.model_fields[info.field_name].description, + try: + # should pass if we're supposed to be a VectorData column + # don't want to override intention here by insisting that it is + # *actually* a VectorData column in case an NDArray has been specified for now + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) ) - ) + except Exception: + raise e class AlignedDynamicTableMixin(DynamicTableMixin): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 92c5eeb..6f6d0b0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -246,7 +246,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] + __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -446,8 +446,9 @@ class DynamicTableMixin(BaseModel): model["colnames"] = colnames return model - @model_validator(mode="after") - def cast_extra_columns(self) -> "DynamicTableMixin": + @model_validator(mode="before") + @classmethod + def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict: """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
@@ -455,22 +456,20 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in self.__pydantic_extra__.items(): + for key, val in model.items(): + if key in cls.model_fields: + continue if not isinstance(val, (VectorData, VectorIndex)): try: if key.endswith("_index"): - self.__pydantic_extra__[key] = VectorIndex( - name=key, description="", value=val - ) + model[key] = VectorIndex(name=key, description="", value=val) else: - self.__pydantic_extra__[key] = VectorData( - name=key, description="", value=val - ) + model[key] = VectorData(name=key, description="", value=val) except ValidationError as e: raise ValidationError( f"field {key} cannot be cast to VectorData from {val}" ) from e - return self + return model @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": @@ -482,6 +481,8 @@ class DynamicTableMixin(BaseModel): # find an index idx = None for field_name in self.model_fields_set: + if field_name in self.NON_COLUMN_FIELDS or field_name == key: + continue # implicit name-based index field = getattr(self, field_name) if isinstance(field, VectorIndex) and ( @@ -520,17 +521,23 @@ class DynamicTableMixin(BaseModel): """ try: return handler(val) - except ValidationError: + except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation if type(annotation).__name__ == "_UnionGenericAlias": annotation = annotation.__args__[0] - return handler( - annotation( - val, - name=info.field_name, - description=cls.model_fields[info.field_name].description, + try: + # should pass if we're supposed to be a VectorData column + # don't want to override intention here by insisting that it is + # *actually* a VectorData column in case an NDArray has been specified for now + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) ) - ) + except Exception: + raise e class AlignedDynamicTableMixin(DynamicTableMixin): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 755b884..3590de9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -246,7 +246,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] + __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -446,8 +446,9 @@ class DynamicTableMixin(BaseModel): model["colnames"] = colnames return model - @model_validator(mode="after") - def cast_extra_columns(self) -> "DynamicTableMixin": + @model_validator(mode="before") + @classmethod + def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict: """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
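# A sketch of the resolved VectorData/VectorIndex pairing that these validators
# prepare for, mirroring test_vectordata_indexing from later in this series
# (the concrete arrays are illustrative; assumes the test-only subclasses in
# nwb_linkml.includes.hdmf).

import numpy as np

from nwb_linkml.includes import hdmf

data = hdmf.VectorData(value=np.concatenate([np.zeros(3), np.ones(4)]))
index = hdmf.VectorIndex(value=np.array([3, 7]), target=data)
data._index = index

# once paired, both objects index raggedly off the flat value array
assert all(data[0] == 0)  # row 0 is value[0:3]
assert all(data[1] == 1)  # row 1 is value[3:7]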
@@ -455,22 +456,20 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in self.__pydantic_extra__.items(): + for key, val in model.items(): + if key in cls.model_fields: + continue if not isinstance(val, (VectorData, VectorIndex)): try: if key.endswith("_index"): - self.__pydantic_extra__[key] = VectorIndex( - name=key, description="", value=val - ) + model[key] = VectorIndex(name=key, description="", value=val) else: - self.__pydantic_extra__[key] = VectorData( - name=key, description="", value=val - ) + model[key] = VectorData(name=key, description="", value=val) except ValidationError as e: raise ValidationError( f"field {key} cannot be cast to VectorData from {val}" ) from e - return self + return model @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": @@ -482,6 +481,8 @@ class DynamicTableMixin(BaseModel): # find an index idx = None for field_name in self.model_fields_set: + if field_name in self.NON_COLUMN_FIELDS or field_name == key: + continue # implicit name-based index field = getattr(self, field_name) if isinstance(field, VectorIndex) and ( @@ -520,17 +521,23 @@ class DynamicTableMixin(BaseModel): """ try: return handler(val) - except ValidationError: + except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation if type(annotation).__name__ == "_UnionGenericAlias": annotation = annotation.__args__[0] - return handler( - annotation( - val, - name=info.field_name, - description=cls.model_fields[info.field_name].description, + try: + # should pass if we're supposed to be a VectorData column + # don't want to override intention here by insisting that it is + # *actually* a VectorData column in case an NDArray has been specified for now + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) ) - ) + except Exception: + raise e class AlignedDynamicTableMixin(DynamicTableMixin): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index 63d007d..07b20e3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -246,7 +246,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] + __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -446,8 +446,9 @@ class DynamicTableMixin(BaseModel): model["colnames"] = colnames return model - @model_validator(mode="after") - def cast_extra_columns(self) -> "DynamicTableMixin": + @model_validator(mode="before") + @classmethod + def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict: """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
@@ -455,22 +456,20 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in self.__pydantic_extra__.items(): + for key, val in model.items(): + if key in cls.model_fields: + continue if not isinstance(val, (VectorData, VectorIndex)): try: if key.endswith("_index"): - self.__pydantic_extra__[key] = VectorIndex( - name=key, description="", value=val - ) + model[key] = VectorIndex(name=key, description="", value=val) else: - self.__pydantic_extra__[key] = VectorData( - name=key, description="", value=val - ) + model[key] = VectorData(name=key, description="", value=val) except ValidationError as e: raise ValidationError( f"field {key} cannot be cast to VectorData from {val}" ) from e - return self + return model @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": @@ -482,6 +481,8 @@ class DynamicTableMixin(BaseModel): # find an index idx = None for field_name in self.model_fields_set: + if field_name in self.NON_COLUMN_FIELDS or field_name == key: + continue # implicit name-based index field = getattr(self, field_name) if isinstance(field, VectorIndex) and ( @@ -520,17 +521,23 @@ class DynamicTableMixin(BaseModel): """ try: return handler(val) - except ValidationError: + except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation if type(annotation).__name__ == "_UnionGenericAlias": annotation = annotation.__args__[0] - return handler( - annotation( - val, - name=info.field_name, - description=cls.model_fields[info.field_name].description, + try: + # should pass if we're supposed to be a VectorData column + # don't want to override intention here by insisting that it is + # *actually* a VectorData column in case an NDArray has been specified for now + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) ) - ) + except Exception: + raise e class AlignedDynamicTableMixin(DynamicTableMixin): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index 4be797e..5e7a82f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -246,7 +246,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] + __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -446,8 +446,9 @@ class DynamicTableMixin(BaseModel): model["colnames"] = colnames return model - @model_validator(mode="after") - def cast_extra_columns(self) -> "DynamicTableMixin": + @model_validator(mode="before") + @classmethod + def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict: """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
@@ -455,22 +456,20 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in self.__pydantic_extra__.items(): + for key, val in model.items(): + if key in cls.model_fields: + continue if not isinstance(val, (VectorData, VectorIndex)): try: if key.endswith("_index"): - self.__pydantic_extra__[key] = VectorIndex( - name=key, description="", value=val - ) + model[key] = VectorIndex(name=key, description="", value=val) else: - self.__pydantic_extra__[key] = VectorData( - name=key, description="", value=val - ) + model[key] = VectorData(name=key, description="", value=val) except ValidationError as e: raise ValidationError( f"field {key} cannot be cast to VectorData from {val}" ) from e - return self + return model @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": @@ -482,6 +481,8 @@ class DynamicTableMixin(BaseModel): # find an index idx = None for field_name in self.model_fields_set: + if field_name in self.NON_COLUMN_FIELDS or field_name == key: + continue # implicit name-based index field = getattr(self, field_name) if isinstance(field, VectorIndex) and ( @@ -520,17 +521,23 @@ class DynamicTableMixin(BaseModel): """ try: return handler(val) - except ValidationError: + except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation if type(annotation).__name__ == "_UnionGenericAlias": annotation = annotation.__args__[0] - return handler( - annotation( - val, - name=info.field_name, - description=cls.model_fields[info.field_name].description, + try: + # should pass if we're supposed to be a VectorData column + # don't want to override intention here by insisting that it is + # *actually* a VectorData column in case an NDArray has been specified for now + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) ) - ) + except Exception: + raise e class AlignedDynamicTableMixin(DynamicTableMixin): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py index c1b0a8d..7a6660a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -246,7 +246,7 @@ class DynamicTableMixin(BaseModel): """ model_config = ConfigDict(extra="allow") - __pydantic_extra__: Dict[str, Union[list, "NDArray", "VectorDataMixin"]] + __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( "name", "colnames", @@ -446,8 +446,9 @@ class DynamicTableMixin(BaseModel): model["colnames"] = colnames return model - @model_validator(mode="after") - def cast_extra_columns(self) -> "DynamicTableMixin": + @model_validator(mode="before") + @classmethod + def cast_extra_columns(cls, model: Dict[str, Any]) -> Dict: """ If extra columns are passed as just lists or arrays, cast to VectorData before we resolve targets for VectorData and VectorIndex pairs. 
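# Companion behavior for columns that *are* in the class specification: the
# `cast_specified_columns` validator retries validation by wrapping a bare
# array in the field's annotation. A sketch mirroring test_dynamictable_coercion
# (import paths assumed as in the test module):

import numpy as np
from numpydantic import NDArray, Shape

from nwb_linkml.includes import hdmf
from nwb_linkml.includes.hdmf import DynamicTableMixin


class MyDT(DynamicTableMixin):
    existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]]


inst = MyDT(existing_col=np.arange(10))  # a bare array, not a VectorData
assert isinstance(inst.existing_col, hdmf.VectorData)
assert all(inst.existing_col.value == np.arange(10))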
@@ -455,22 +456,20 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in self.__pydantic_extra__.items(): + for key, val in model.items(): + if key in cls.model_fields: + continue if not isinstance(val, (VectorData, VectorIndex)): try: if key.endswith("_index"): - self.__pydantic_extra__[key] = VectorIndex( - name=key, description="", value=val - ) + model[key] = VectorIndex(name=key, description="", value=val) else: - self.__pydantic_extra__[key] = VectorData( - name=key, description="", value=val - ) + model[key] = VectorData(name=key, description="", value=val) except ValidationError as e: raise ValidationError( f"field {key} cannot be cast to VectorData from {val}" ) from e - return self + return model @model_validator(mode="after") def resolve_targets(self) -> "DynamicTableMixin": @@ -482,6 +481,8 @@ class DynamicTableMixin(BaseModel): # find an index idx = None for field_name in self.model_fields_set: + if field_name in self.NON_COLUMN_FIELDS or field_name == key: + continue # implicit name-based index field = getattr(self, field_name) if isinstance(field, VectorIndex) and ( @@ -520,17 +521,23 @@ class DynamicTableMixin(BaseModel): """ try: return handler(val) - except ValidationError: + except ValidationError as e: annotation = cls.model_fields[info.field_name].annotation if type(annotation).__name__ == "_UnionGenericAlias": annotation = annotation.__args__[0] - return handler( - annotation( - val, - name=info.field_name, - description=cls.model_fields[info.field_name].description, + try: + # should pass if we're supposed to be a VectorData column + # don't want to override intention here by insisting that it is + # *actually* a VectorData column in case an NDArray has been specified for now + return handler( + annotation( + val, + name=info.field_name, + description=cls.model_fields[info.field_name].description, + ) ) - ) + except Exception: + raise e class AlignedDynamicTableMixin(DynamicTableMixin): From 5780947fe67b3eb45e4e01dddf7a04b103c8ac71 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 14 Aug 2024 00:09:24 -0700 Subject: [PATCH 49/61] indexing tests, fix for pytest 8, logging tests --- nwb_linkml/pdm.lock | 33 ++++++++-------- nwb_linkml/pyproject.toml | 5 +-- nwb_linkml/src/nwb_linkml/maps/hdmf.py | 24 ------------ nwb_linkml/tests/test_includes/test_hdmf.py | 43 +++++++++++++++++++-- nwb_linkml/tests/test_logging.py | 35 +++++++++++++++++ 5 files changed, 93 insertions(+), 47 deletions(-) delete mode 100644 nwb_linkml/src/nwb_linkml/maps/hdmf.py create mode 100644 nwb_linkml/tests/test_logging.py diff --git a/nwb_linkml/pdm.lock b/nwb_linkml/pdm.lock index 3482eac..bea21e9 100644 --- a/nwb_linkml/pdm.lock +++ b/nwb_linkml/pdm.lock @@ -5,7 +5,7 @@ groups = ["default", "dev", "plot", "tests"] strategy = ["inherit_metadata"] lock_version = "4.5.0" -content_hash = "sha256:20f4f69a117fab515291f53ff39dfd1e13e3c8cbd399f1f7ce308cbea7deeaf0" +content_hash = "sha256:d9e9b2a7f48f3db3e59cd58907a18aa69f91595d07eca53be0bf53b5fb2ba990" [[metadata.targets]] requires_python = ">=3.10,<3.13" @@ -70,7 +70,7 @@ name = "black" version = "24.8.0" requires_python = ">=3.8" summary = "The uncompromising code formatter." 
-groups = ["default", "dev"] +groups = ["default"] dependencies = [ "click>=8.0.0", "mypy-extensions>=0.4.3", @@ -219,7 +219,7 @@ name = "click" version = "8.1.7" requires_python = ">=3.7" summary = "Composable command line interface toolkit" -groups = ["default", "dev", "plot"] +groups = ["default", "plot"] dependencies = [ "colorama; platform_system == \"Windows\"", "importlib-metadata; python_version < \"3.8\"", @@ -512,7 +512,7 @@ version = "3.0.3" requires_python = ">=3.7" summary = "Lightweight in-process concurrent programming" groups = ["default"] -marker = "(platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"" +marker = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\"" files = [ {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, @@ -954,7 +954,7 @@ name = "mypy-extensions" version = "1.0.0" requires_python = ">=3.5" summary = "Type system extensions for programs checked with the mypy type checker." -groups = ["default", "dev"] +groups = ["default"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -1132,7 +1132,7 @@ name = "pathspec" version = "0.12.1" requires_python = ">=3.8" summary = "Utility library for gitignore style pattern matching of file paths." 
-groups = ["default", "dev"] +groups = ["default"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -1385,22 +1385,21 @@ files = [ [[package]] name = "pytest" -version = "7.4.4" -requires_python = ">=3.7" +version = "8.3.2" +requires_python = ">=3.8" summary = "pytest: simple powerful testing with Python" groups = ["default", "dev", "tests"] dependencies = [ "colorama; sys_platform == \"win32\"", "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", - "importlib-metadata>=0.12; python_version < \"3.8\"", "iniconfig", "packaging", - "pluggy<2.0,>=0.12", - "tomli>=1.0.0; python_version < \"3.11\"", + "pluggy<2,>=1.5", + "tomli>=1; python_version < \"3.11\"", ] files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, + {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, + {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, ] [[package]] @@ -1759,7 +1758,7 @@ version = "0.2.8" requires_python = ">=3.6" summary = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" groups = ["default"] -marker = "platform_python_implementation == \"CPython\" and python_version < \"3.13\"" +marker = "platform_python_implementation == \"CPython\"" files = [ {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, @@ -1932,13 +1931,13 @@ files = [ [[package]] name = "sybil" -version = "5.0.3" +version = "6.1.1" requires_python = ">=3.7" summary = "Automated testing for the examples in your code and documentation." 
groups = ["dev", "tests"] files = [ - {file = "sybil-5.0.3-py3-none-any.whl", hash = "sha256:6f3c30822169895c4fb34c8366bdb132cf62bb68fb1d03d2ebb05282eab08c95"}, - {file = "sybil-5.0.3.tar.gz", hash = "sha256:20dfe3a35a8d1ffcb4311434d1abf38c030c91064d75ff6b56ddd1060e08e758"}, + {file = "sybil-6.1.1-py3-none-any.whl", hash = "sha256:04ae5e17997bc5166ba3da0d6244767c397e129f399a1aa23c89b30a704fec2c"}, + {file = "sybil-6.1.1.tar.gz", hash = "sha256:8fb4f2c3582d1fe6705d1ae3a31f93fec7619f634940a8fcbf5d1b7d18183917"}, ] [[package]] diff --git a/nwb_linkml/pyproject.toml b/nwb_linkml/pyproject.toml index f149733..f603a55 100644 --- a/nwb_linkml/pyproject.toml +++ b/nwb_linkml/pyproject.toml @@ -37,16 +37,15 @@ plot = [ ] tests = [ "nwb-linkml", - "pytest<8.0.0,>=7.4.0", + "pytest>=8.0.0", "pytest-depends<2.0.0,>=1.0.1", "pytest-cov<5.0.0,>=4.1.0", - "sybil<6.0.0,>=5.0.3", + "sybil>=6.0.3", "requests-cache>=1.2.1", ] dev = [ "nwb-linkml[tests]", "ruff>=0.5.0", - "black>=24.4.2", ] [tool.pdm] diff --git a/nwb_linkml/src/nwb_linkml/maps/hdmf.py b/nwb_linkml/src/nwb_linkml/maps/hdmf.py deleted file mode 100644 index 3c52b22..0000000 --- a/nwb_linkml/src/nwb_linkml/maps/hdmf.py +++ /dev/null @@ -1,24 +0,0 @@ -""" -Mapping functions for handling HDMF classes like DynamicTables -""" - -from typing import Any, List, Optional - -import h5py - - -def dereference_reference_vector(dset: h5py.Dataset, data: Optional[List[Any]]) -> List: - """ - Given a compound dataset with indices, counts, and object references, dereference to values - - Data is of the form - (idx_start, count, target) - """ - # assume all these references are to the same target - # and the index is in the 3rd position - if data is None: - data = dset[:] - - target = dset.parent.get(data[0][-1]) - res = [target[d[0] : d[0] + d[1]] for d in data] - return res diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index c5a164f..9654301 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -213,10 +213,47 @@ def test_aligned_dynamictable(intracellular_recordings_table): def test_dynamictable_mixin_indexing(): """ - This is just a placeholder test to say that indexing is tested above - with actual model objects in case i ever ctrl+f for this + Can index values from a dynamictable """ - pass + + class MyData(DynamicTableMixin): + col_1: hdmf.VectorData[NDArray[Shape["*"], int]] + col_2: hdmf.VectorData[NDArray[Shape["*"], int]] + col_3: hdmf.VectorData[NDArray[Shape["*"], int]] + + cols = { + "col_1": np.arange(10), + "col_2": np.arange(10), + "col_3": np.arange(10), + "col_4": np.arange(10), + "col_5": np.arange(10), + } + colnames = [c for c in cols] + inst = MyData(**cols) + + row = inst[0] + # successfully get a single row :) + assert row.shape == (1, 5) + assert row.columns.tolist() == colnames + + # slice a range of rows + rows = inst[0:3] + assert rows.shape == (3, 5) + + # get a single column + col = inst["col_1"] + assert all(col.value == np.arange(10)) + + # get a single cell + val = inst[5, "col_2"] + assert val == 5 + val = inst[5, 1] + assert val == 5 + + # get a slice of rows and columns + subsection = inst[0:3, 0:3] + assert subsection.shape == (3, 3) + assert subsection.columns.tolist() == colnames[0:3] def test_dynamictable_mixin_colnames(): diff --git a/nwb_linkml/tests/test_logging.py b/nwb_linkml/tests/test_logging.py new file mode 100644 index 0000000..4585ed1 --- /dev/null +++ b/nwb_linkml/tests/test_logging.py @@ -0,0 
+1,35 @@ +from pathlib import Path + +from nwb_linkml.logging import init_logger + + +def test_init_logger(capsys, tmp_path): + """ + We should be able to + - log to file and stdout + - with separable levels + """ + + log_dir = Path(tmp_path) / "logs" + log_dir.mkdir() + log_file = log_dir / "nwb_linkml.test_logger.log" + logger = init_logger(name="test_logger", log_dir=log_dir, level="INFO", file_level="WARNING") + warn_msg = "Both loggers should show" + logger.warning(warn_msg) + + # can't test for presence of string because logger can split lines depending on size of console + # but there should be one WARNING in stdout + captured = capsys.readouterr() + assert "WARNING" in captured.out + + with open(log_file, "r") as lfile: + log_str = lfile.read() + assert "WARNING" in log_str + + info_msg = "Now only stdout should show" + logger.info(info_msg) + captured = capsys.readouterr() + assert "INFO" in captured.out + with open(log_file, "r") as lfile: + log_str = lfile.read() + assert "INFO" not in log_str From b610f32c4b02defb8ba890d22a31e15118eb3b52 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 14 Aug 2024 00:21:06 -0700 Subject: [PATCH 50/61] vectordata indexing tests --- nwb_linkml/tests/test_includes/test_hdmf.py | 31 +++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index 9654301..100a70f 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -437,6 +437,37 @@ def dynamictable_assert_equal_length(): _ = MyDT(**cols) +def test_vectordata_indexing(): + """ + Vectordata/VectorIndex pairs should know how to index off each other + """ + n_rows = 50 + value_array, index_array = _ragged_array(n_rows) + value_array = np.concat(value_array) + + data = hdmf.VectorData(value=value_array) + + # before we have an index, things should work as normal, indexing a 1D array + assert data[0] == 0 + + index = hdmf.VectorIndex(value=index_array, target=data) + data._index = index + + # after an index, both objects should index raggedly + for i in range(len(index)): + assert all(data[i] == i) + assert all(index[i] == i) + + for item in (data, index): + section = item[0:3] + for i, subitem in enumerate(section): + assert all(subitem == i) + + # setting uses the same indexing logic + data[0][:] = 5 + assert all(data[0] == 5) + + def test_vectordata_generic_numpydantic_validation(): """ Using VectorData as a generic with a numpydantic array annotation should still validate From 7cb8eea6fe89dc05a6c75a88d09baa77a5727bf3 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 14 Aug 2024 00:21:48 -0700 Subject: [PATCH 51/61] lint --- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 2 +- nwb_linkml/tests/test_logging.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 6c0c8bc..58cfb8c 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -340,7 +340,7 @@ class DynamicTableMixin(BaseModel): ) ) except Exception: - raise e + raise e from None class VectorDataMixin(BaseModel, Generic[T]): diff --git a/nwb_linkml/tests/test_logging.py b/nwb_linkml/tests/test_logging.py index 4585ed1..ddabac1 100644 --- a/nwb_linkml/tests/test_logging.py +++ b/nwb_linkml/tests/test_logging.py @@ -22,7 +22,7 @@ def test_init_logger(capsys, tmp_path): captured = capsys.readouterr() assert "WARNING" 
in captured.out - with open(log_file, "r") as lfile: + with open(log_file) as lfile: log_str = lfile.read() assert "WARNING" in log_str @@ -30,6 +30,6 @@ def test_init_logger(capsys, tmp_path): logger.info(info_msg) captured = capsys.readouterr() assert "INFO" in captured.out - with open(log_file, "r") as lfile: + with open(log_file) as lfile: log_str = lfile.read() assert "INFO" not in log_str From 36add1a306980654aaf44fae6210445f5395c381 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 14 Aug 2024 22:17:03 -0700 Subject: [PATCH 52/61] region tests --- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 70 ++- nwb_linkml/tests/test_includes/test_hdmf.py | 549 ++++++++++++-------- 2 files changed, 404 insertions(+), 215 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 58cfb8c..9763ab3 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -48,9 +48,10 @@ class DynamicTableMixin(BaseModel): but simplifying along the way :) """ - model_config = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow", validate_assignment=True) __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "id", "name", "colnames", "description", @@ -116,6 +117,7 @@ class DynamicTableMixin(BaseModel): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): data = self._slice_range(item) + index = self.id[item] elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -133,11 +135,15 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) + index = self.id[rows] else: raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return pd.DataFrame(data) + if not isinstance(index, Iterable): + index = [index] + index = pd.Index(data=index) + return pd.DataFrame(data, index=index) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -149,31 +155,40 @@ class DynamicTableMixin(BaseModel): data = {} for k in cols: if isinstance(rows, np.ndarray): + # help wanted - this is probably cr*zy slow val = [self._columns[k][i] for i in rows] else: val = self._columns[k][rows] # scalars need to be wrapped in series for pandas + # do this by the iterability of the rows index not the value because + # we want all lengths from this method to be equal, and if the rows are + # scalar, that means length == 1 if not isinstance(rows, (Iterable, slice)): - val = pd.Series([val]) + val = [val] data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") + raise NotImplementedError("TODO") # pragma: no cover def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): """ Add a column, appending it to ``colnames`` """ # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): + if not getattr(self, "__pydantic_complete__", False): # pragma: no cover return super().__setattr__(key, value) if key not in self.model_fields_set and not key.endswith("_index"): self.colnames.append(key) + # we get a recursion error if we setattr without having first added to + # extras if we need it to be there + if key not in self.model_fields and key not in self.__pydantic_extra__: + self.__pydantic_extra__[key] = value + return super().__setattr__(key, value) def 
__getattr__(self, item: str) -> Any: @@ -303,8 +318,8 @@ class DynamicTableMixin(BaseModel): """ Ensure that all columns are equal length """ - lengths = [len(v) for v in self._columns.values()] - assert [length == lengths[0] for length in lengths], ( + lengths = [len(v) for v in self._columns.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -430,9 +445,21 @@ class VectorIndexMixin(BaseModel, Generic[T]): raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value + """ + Set a value on the :attr:`.target` . + + .. note:: + + Even though we correct the indexing logic from HDMF where the + _data_ is the thing that is provided by the API when one accesses + table.data (rather than table.data_index as hdmf does), + we will set to the target here (rather than to the index) + to be consistent. To modify the index, modify `self.value` directly + + """ + if self.target: + # __getitem__ will return the indexed reference to the target + self[key] = value else: self.value[key] = value @@ -463,9 +490,19 @@ class DynamicTableRegionMixin(BaseModel): _index: Optional["VectorIndex"] = None table: "DynamicTableMixin" - value: Optional[NDArray] = None + value: Optional[NDArray[Shape["*"], int]] = None - def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + @overload + def __getitem__(self, item: int) -> pd.DataFrame: ... + + @overload + def __getitem__( + self, item: Union[slice, Iterable] + ) -> List[pd.DataFrame]: ... + + def __getitem__( + self, item: Union[int, slice, Iterable] + ) -> Union[pd.DataFrame, List[pd.DataFrame]]: """ Use ``value`` to index the table. 
Works analogously to ``VectorIndex`` despite this being a subclass of ``VectorData`` @@ -486,6 +523,10 @@ class DynamicTableRegionMixin(BaseModel): if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): + # Return a list of dataframe rows because this is most often used + # as a column in a DynamicTable, so while it would normally be + # ideal to just return the slice as above as a single df, + # we need each row to be separate to fill the column if isinstance(item, slice): item = range(*item.indices(len(self.value))) return [self.table[self.value[i]] for i in item] @@ -737,3 +778,8 @@ if "pytest" in sys.modules: """VectorIndex subclass for testing""" pass + + class DynamicTableRegion(DynamicTableRegionMixin, VectorData): + """DynamicTableRegion subclass for testing""" + + pass diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index 100a70f..420a5ae 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -1,3 +1,5 @@ +from typing import Optional + import numpy as np import pandas as pd import pytest @@ -9,213 +11,19 @@ from nwb_linkml.includes.hdmf import DynamicTableMixin, VectorDataMixin, VectorI # FIXME: Make this just be the output of the provider by patching into import machinery from nwb_linkml.models.pydantic.core.v2_7_0.namespace import ( - DynamicTable, - DynamicTableRegion, ElectrodeGroup, - VectorIndex, VoltageClampStimulusSeries, ) from .conftest import _ragged_array - -def test_dynamictable_indexing(electrical_series): - """ - Can index values from a dynamictable - """ - series, electrodes = electrical_series - - colnames = [ - "id", - "x", - "y", - "group", - "group_name", - "location", - "extra_column", - ] - dtypes = [ - np.dtype("int64"), - np.dtype("float64"), - np.dtype("float64"), - ] + ([np.dtype("O")] * 4) - - row = electrodes[0] - # successfully get a single row :) - assert row.shape == (1, 7) - assert row.dtypes.values.tolist() == dtypes - assert row.columns.tolist() == colnames - - # slice a range of rows - rows = electrodes[0:3] - assert rows.shape == (3, 7) - assert rows.dtypes.values.tolist() == dtypes - assert rows.columns.tolist() == colnames - - # get a single column - col = electrodes["y"] - assert all(col.value == [5, 6, 7, 8, 9]) - - # get a single cell - val = electrodes[0, "y"] - assert val == 5 - val = electrodes[0, 2] - assert val == 5 - - # get a slice of rows and columns - subsection = electrodes[0:3, 0:3] - assert subsection.shape == (3, 3) - assert subsection.columns.tolist() == colnames[0:3] - assert subsection.dtypes.values.tolist() == dtypes[0:3] - - -def test_dynamictable_ragged(units): - """ - Should be able to index ragged arrays using an implicit _index column - - Also tests: - - passing arrays directly instead of wrapping in vectordata/index specifically, - if the models in the fixture instantiate then this works - """ - units, spike_times, spike_idx = units - - # ensure we don't pivot to long when indexing - assert units[0].shape[0] == 1 - # check that we got the indexing boundaries corrunect - # (and that we are forwarding attr calls to the dataframe by accessing shape - for i in range(units.shape[0]): - assert np.all(units.iloc[i, 0] == spike_times[i]) - - -def test_dynamictable_region_basic(electrical_series): - """ - DynamicTableRegion should be able to refer to a row or rows of another table - itself as a column within a table - """ - series, electrodes = 
electrical_series - row = series.electrodes[0] - # check that we correctly got the 4th row instead of the 0th row, - # since the indexed table was constructed with inverted indexes because it's a test, ya dummy. - # we will only vaguely check the basic functionality here bc - # a) the indexing behavior of the indexed objects is tested above, and - # b) every other object in the chain is strictly validated, - # so we assume if we got a right shaped df that it is the correct one. - # feel free to @ me when i am wrong about this - assert all(row.id == 4) - assert row.shape == (1, 7) - # and we should still be preserving the model that is the contents of the cell of this row - # so this is a dataframe row with a column "group" that contains an array of ElectrodeGroup - # objects and that's as far as we are going to chase the recursion in this basic indexing test - # ElectrodeGroup is strictly validating so an instance check is all we need. - assert isinstance(row.group.values[0], ElectrodeGroup) - - # getting a list of table rows is actually correct behavior here because - # this list of table rows is actually the cell of another table - rows = series.electrodes[0:3] - assert all([all(row.id == idx) for row, idx in zip(rows, [4, 3, 2])]) - - -def test_dynamictable_region_ragged(): - """ - Dynamictables can also have indexes so that they are ragged arrays of column rows - """ - spike_times, spike_idx = _ragged_array(24) - spike_times_flat = np.concatenate(spike_times) - - # construct a secondary index that selects overlapping segments of the first table - value = np.array([0, 1, 2, 1, 2, 3, 2, 3, 4]) - idx = np.array([3, 6, 9]) - - table = DynamicTable( - name="table", - description="a table what else would it be", - id=np.arange(len(spike_idx)), - timeseries=spike_times_flat, - timeseries_index=spike_idx, - ) - region = DynamicTableRegion( - name="dynamictableregion", - description="this field should be optional", - table=table, - value=value, - ) - index = VectorIndex(name="index", description="hgggggggjjjj", target=region, value=idx) - region._index = index - rows = region[1] - # i guess this is right? - # the region should be a set of three rows of the table, with a ragged array column timeseries - # like... - # - # id timeseries - # 0 1 [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, ... - # 1 2 [2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, ... - # 2 3 [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, ... - assert rows.shape == (3, 2) - assert all(rows.id == [1, 2, 3]) - assert all([all(row[1].timeseries == i) for i, row in zip([1, 2, 3], rows.iterrows())]) - - -def test_dynamictable_append_column(): - pass - - -def test_dynamictable_append_row(): - pass - - -def test_dynamictable_extra_coercion(): - """ - Extra fields should be coerced to VectorData and have their - indexing relationships handled when passed as plain arrays. - """ - - -def test_aligned_dynamictable(intracellular_recordings_table): - """ - Multiple aligned dynamictables should be indexable with a multiindex - """ - # can get a single row.. 
(check correctness below) - row = intracellular_recordings_table[0] - # can get a single table with its name - stimuli = intracellular_recordings_table["stimuli"] - assert stimuli.shape == (10, 1) - - # nab a few rows to make the dataframe - rows = intracellular_recordings_table[0:3] - assert all( - rows.columns - == pd.MultiIndex.from_tuples( - [ - ("electrodes", "index"), - ("electrodes", "electrode"), - ("stimuli", "index"), - ("stimuli", "stimulus"), - ("responses", "index"), - ("responses", "response"), - ] - ) - ) - - # ensure that we get the actual values from the TimeSeriesReferenceVectorData - # also tested separately - # each individual cell should be an array of VoltageClampStimulusSeries... - # and then we should be able to index within that as well - stims = rows["stimuli", "stimulus"][0] - for i in range(len(stims)): - assert isinstance(stims[i], VoltageClampStimulusSeries) - assert all([i == val for val in stims[i][:]]) - - # -------------------------------------------------- -# Direct mixin tests +# Unit tests on mixins directly (model tests below) # -------------------------------------------------- -def test_dynamictable_mixin_indexing(): - """ - Can index values from a dynamictable - """ - +@pytest.fixture() +def basic_table() -> tuple[DynamicTableMixin, dict[str, NDArray[Shape["10"], int]]]: class MyData(DynamicTableMixin): col_1: hdmf.VectorData[NDArray[Shape["*"], int]] col_2: hdmf.VectorData[NDArray[Shape["*"], int]] @@ -228,8 +36,18 @@ def test_dynamictable_mixin_indexing(): "col_4": np.arange(10), "col_5": np.arange(10), } + return MyData, cols + + +def test_dynamictable_mixin_indexing(basic_table): + """ + Can index values from a dynamictable + """ + MyData, cols = basic_table + colnames = [c for c in cols] inst = MyData(**cols) + assert len(inst) == 10 row = inst[0] # successfully get a single row :) @@ -251,9 +69,28 @@ def test_dynamictable_mixin_indexing(): assert val == 5 # get a slice of rows and columns - subsection = inst[0:3, 0:3] - assert subsection.shape == (3, 3) - assert subsection.columns.tolist() == colnames[0:3] + val = inst[0:3, 0:3] + assert val.shape == (3, 3) + assert val.columns.tolist() == colnames[0:3] + + # slice of rows with string colname + val = inst[0:2, "col_1"] + assert val.shape == (2, 1) + assert val.columns.tolist() == ["col_1"] + + # array of rows + # crazy slow but we'll work on perf later + val = inst[np.arange(2), "col_1"] + assert val.shape == (2, 1) + assert val.columns.tolist() == ["col_1"] + + # should raise an error on a 3d index + with pytest.raises(ValueError, match=".*2-dimensional.*"): + _ = inst[1, 1, 1] + + # error on unhandled indexing type + with pytest.raises(ValueError, match="Unsure how to get item with key.*"): + _ = inst[5.5] def test_dynamictable_mixin_colnames(): @@ -337,9 +174,12 @@ def test_dynamictable_mixin_getattr(): assert isinstance(inst.existing_col, hdmf.VectorData) assert all(inst.existing_col.value == col.value) - # df lookup for thsoe that don't + # df lookup for those that don't assert isinstance(inst.columns, pd.Index) + with pytest.raises(AttributeError): + _ = inst.really_fake_name_that_pandas_and_pydantic_definitely_dont_define + def test_dynamictable_coercion(): """ @@ -348,15 +188,19 @@ def test_dynamictable_coercion(): class MyDT(DynamicTableMixin): existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]] + optional_col: Optional[hdmf.VectorData[NDArray[Shape["* col"], int]]] cols = { "existing_col": np.arange(10), + "optional_col": np.arange(10), "new_col_1": np.arange(10), } inst = 
MyDT(**cols) assert isinstance(inst.existing_col, hdmf.VectorData) + assert isinstance(inst.optional_col, hdmf.VectorData) assert isinstance(inst.new_col_1, hdmf.VectorData) assert all(inst.existing_col.value == np.arange(10)) + assert all(inst.optional_col.value == np.arange(10)) assert all(inst.new_col_1.value == np.arange(10)) @@ -409,14 +253,14 @@ def dynamictable_assert_equal_length(): "existing_col": np.arange(10), "new_col_1": hdmf.VectorData(value=np.arange(11)), } - with pytest.raises(ValidationError, pattern="Columns are not of equal length"): + with pytest.raises(ValidationError, match="Columns are not of equal length"): _ = MyDT(**cols) cols = { "existing_col": np.arange(11), "new_col_1": hdmf.VectorData(value=np.arange(10)), } - with pytest.raises(ValidationError, pattern="Columns are not of equal length"): + with pytest.raises(ValidationError, match="Columns are not of equal length"): _ = MyDT(**cols) # wrong lengths are fine as long as the index is good @@ -437,6 +281,32 @@ def dynamictable_assert_equal_length(): _ = MyDT(**cols) +def test_dynamictable_setattr(): + """ + Setting a new column as an attribute adds it to colnames and reruns validations + """ + + class MyDT(DynamicTableMixin): + existing_col: hdmf.VectorData[NDArray[Shape["* col"], int]] + + cols = { + "existing_col": hdmf.VectorData(value=np.arange(10)), + "new_col_1": hdmf.VectorData(value=np.arange(10)), + } + inst = MyDT(existing_col=cols["existing_col"]) + assert inst.colnames == ["existing_col"] + + inst.new_col_1 = cols["new_col_1"] + assert inst.colnames == ["existing_col", "new_col_1"] + assert inst[:].columns.tolist() == ["existing_col", "new_col_1"] + # length unchanged because id should be the same + assert len(inst) == 10 + + # model validators should be called to ensure equal length + with pytest.raises(ValidationError): + inst.new_col_2 = hdmf.VectorData(value=np.arange(11)) + + def test_vectordata_indexing(): """ Vectordata/VectorIndex pairs should know how to index off each other @@ -449,6 +319,10 @@ def test_vectordata_indexing(): # before we have an index, things should work as normal, indexing a 1D array assert data[0] == 0 + # and setting values + data[0] = 1 + assert data[0] == 1 + data[0] = 0 index = hdmf.VectorIndex(value=index_array, target=data) data._index = index @@ -468,6 +342,31 @@ def test_vectordata_indexing(): assert all(data[0] == 5) +def test_vectordata_getattr(): + """ + VectorData and VectorIndex both forward getattr to ``value`` + """ + data = hdmf.VectorData(value=np.arange(100)) + index = hdmf.VectorIndex(value=np.arange(10, 101, 10), target=data) + + # get attrs that we defined on the models + # i.e. no attribute errors here + _ = data.model_fields + _ = index.model_fields + + # but for things that aren't defined, get the numpy method + # note that index should not try and get the sum from the target - + # that would be hella confusing. we only refer to the target when indexing. 
+ assert data.sum() == np.sum(np.arange(100)) + assert index.sum() == np.sum(np.arange(10, 101, 10)) + + # and also raise attribute errors when nothing is found + with pytest.raises(AttributeError): + _ = data.super_fake_attr_name + with pytest.raises(AttributeError): + _ = index.super_fake_attr_name + + def test_vectordata_generic_numpydantic_validation(): """ Using VectorData as a generic with a numpydantic array annotation should still validate @@ -481,3 +380,247 @@ def test_vectordata_generic_numpydantic_validation(): with pytest.raises(ValidationError): _ = MyDT(existing_col=np.zeros((4, 5, 6), dtype=int)) + + +@pytest.mark.xfail +def test_dynamictable_append_row(): + raise NotImplementedError("Reminder to implement row appending") + + +def test_dynamictable_region_indexing(basic_table): + """ + Without an index, DynamicTableRegion should just be a single-row index into + another table + """ + model, cols = basic_table + inst = model(**cols) + + index = np.array([9, 4, 8, 3, 7, 2, 6, 1, 5, 0]) + + table_region = hdmf.DynamicTableRegion(value=index, table=inst) + + row = table_region[1] + assert all(row.iloc[0] == index[1]) + + # slices + rows = table_region[3:5] + assert all(rows[0].iloc[0] == index[3]) + assert all(rows[1].iloc[0] == index[4]) + assert len(rows) == 2 + assert all([row.shape == (1, 5) for row in rows]) + + # out of order fine too + oorder = [2, 5, 4] + rows = table_region[oorder] + assert len(rows) == 3 + assert all([row.shape == (1, 5) for row in rows]) + for i, idx in enumerate(oorder): + assert all(rows[i].iloc[0] == index[idx]) + + # also works when used as a column in a table + class AnotherTable(DynamicTableMixin): + region: hdmf.DynamicTableRegion + another_col: hdmf.VectorData[NDArray[Shape["*"], int]] + + inst2 = AnotherTable(region=table_region, another_col=np.arange(10)) + rows = inst2[0:3] + col = rows.region + for i in range(3): + assert all(col[i].iloc[0] == index[i]) + + +def test_dynamictable_region_ragged(): + """ + Dynamictables can also have indexes so that they are ragged arrays of column rows + """ + spike_times, spike_idx = _ragged_array(24) + spike_times_flat = np.concatenate(spike_times) + + # construct a secondary index that selects overlapping segments of the first table + value = np.array([0, 1, 2, 1, 2, 3, 2, 3, 4]) + idx = np.array([3, 6, 9]) + + table = DynamicTableMixin( + name="table", + description="a table what else would it be", + id=np.arange(len(spike_idx)), + another_column=np.arange(len(spike_idx) - 1, -1, -1), + timeseries=spike_times_flat, + timeseries_index=spike_idx, + ) + region = hdmf.DynamicTableRegion( + table=table, + value=value, + ) + index = hdmf.VectorIndex(name="index", description="hgggggggjjjj", target=region, value=idx) + region._index = index + + rows = region[1] + # i guess this is right? + # the region should be a set of three rows of the table, with a ragged array column timeseries + # like... + # + # id timeseries + # 0 1 [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, ... + # 1 2 [2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, ... + # 2 3 [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, ... 
+    assert rows.shape == (3, 2)
+    assert all(rows.index.to_numpy() == [1, 2, 3])
+    assert all([all(row[1].timeseries == i) for i, row in zip([1, 2, 3], rows.iterrows())])
+
+    rows = region[0:2]
+    for i in range(2):
+        assert all(
+            [all(row[1].timeseries == i) for i, row in zip(range(i, i + 3), rows[i].iterrows())]
+        )
+
+    # also works when used as a column in a table
+    class AnotherTable(DynamicTableMixin):
+        region: hdmf.DynamicTableRegion
+        yet_another_col: hdmf.VectorData[NDArray[Shape["*"], int]]
+
+    inst2 = AnotherTable(region=region, yet_another_col=np.arange(len(idx)))
+    row = inst2[0]
+    assert row.shape == (1, 2)
+    assert row.iloc[0, 0].equals(region[0])
+
+    rows = inst2[0:3]
+    for i, df in enumerate(rows.iloc[:, 0]):
+        assert df.equals(region[i])
+
+
+# --------------------------------------------------
+# Model-based tests
+# --------------------------------------------------
+
+
+def test_dynamictable_indexing_electricalseries(electrical_series):
+    """
+    Can index values from a dynamictable
+    """
+    series, electrodes = electrical_series
+
+    colnames = [
+        "id",
+        "x",
+        "y",
+        "group",
+        "group_name",
+        "location",
+        "extra_column",
+    ]
+    dtypes = [
+        np.dtype("int64"),
+        np.dtype("float64"),
+        np.dtype("float64"),
+    ] + ([np.dtype("O")] * 4)
+
+    row = electrodes[0]
+    # successfully get a single row :)
+    assert row.shape == (1, 7)
+    assert row.dtypes.values.tolist() == dtypes
+    assert row.columns.tolist() == colnames
+
+    # slice a range of rows
+    rows = electrodes[0:3]
+    assert rows.shape == (3, 7)
+    assert rows.dtypes.values.tolist() == dtypes
+    assert rows.columns.tolist() == colnames
+
+    # get a single column
+    col = electrodes["y"]
+    assert all(col.value == [5, 6, 7, 8, 9])
+
+    # get a single cell
+    val = electrodes[0, "y"]
+    assert val == 5
+    val = electrodes[0, 2]
+    assert val == 5
+
+    # get a slice of rows and columns
+    subsection = electrodes[0:3, 0:3]
+    assert subsection.shape == (3, 3)
+    assert subsection.columns.tolist() == colnames[0:3]
+    assert subsection.dtypes.values.tolist() == dtypes[0:3]
+
+
+def test_dynamictable_ragged_units(units):
+    """
+    Should be able to index ragged arrays using an implicit _index column
+
+    Also tests:
+    - passing arrays directly instead of wrapping in vectordata/index specifically,
+      if the models in the fixture instantiate then this works
+    """
+    units, spike_times, spike_idx = units
+
+    # ensure we don't pivot to long when indexing
+    assert units[0].shape[0] == 1
+    # check that we got the indexing boundaries correct
+    # (and that we are forwarding attr calls to the dataframe by accessing shape)
+    for i in range(units.shape[0]):
+        assert np.all(units.iloc[i, 0] == spike_times[i])
+
+
+def test_dynamictable_region_basic_electricalseries(electrical_series):
+    """
+    DynamicTableRegion should be able to refer to a row or rows of another table
+    itself as a column within a table
+    """
+    series, electrodes = electrical_series
+    row = series.electrodes[0]
+    # check that we correctly got the 4th row instead of the 0th row,
+    # since the indexed table was constructed with inverted indexes because it's a test, ya dummy.
+    # we will only vaguely check the basic functionality here bc
+    # a) the indexing behavior of the indexed objects is tested above, and
+    # b) every other object in the chain is strictly validated,
+    # so we assume if we got a right shaped df that it is the correct one.
+ # feel free to @ me when i am wrong about this + assert all(row.id == 4) + assert row.shape == (1, 7) + # and we should still be preserving the model that is the contents of the cell of this row + # so this is a dataframe row with a column "group" that contains an array of ElectrodeGroup + # objects and that's as far as we are going to chase the recursion in this basic indexing test + # ElectrodeGroup is strictly validating so an instance check is all we need. + assert isinstance(row.group.values[0], ElectrodeGroup) + + # getting a list of table rows is actually correct behavior here because + # this list of table rows is actually the cell of another table + rows = series.electrodes[0:3] + assert all([all(row.id == idx) for row, idx in zip(rows, [4, 3, 2])]) + + +def test_aligned_dynamictable_ictable(intracellular_recordings_table): + """ + Multiple aligned dynamictables should be indexable with a multiindex + """ + # can get a single row.. (check correctness below) + row = intracellular_recordings_table[0] + # can get a single table with its name + stimuli = intracellular_recordings_table["stimuli"] + assert stimuli.shape == (10, 1) + + # nab a few rows to make the dataframe + rows = intracellular_recordings_table[0:3] + assert all( + rows.columns + == pd.MultiIndex.from_tuples( + [ + ("electrodes", "index"), + ("electrodes", "electrode"), + ("stimuli", "index"), + ("stimuli", "stimulus"), + ("responses", "index"), + ("responses", "response"), + ] + ) + ) + + # ensure that we get the actual values from the TimeSeriesReferenceVectorData + # also tested separately + # each individual cell should be an array of VoltageClampStimulusSeries... + # and then we should be able to index within that as well + stims = rows["stimuli", "stimulus"][0] + for i in range(len(stims)): + assert isinstance(stims[i], VoltageClampStimulusSeries) + assert all([i == val for val in stims[i][:]]) From b601027ef6973bb92cf6f8f1e0933360a15aa4df Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 14 Aug 2024 22:26:29 -0700 Subject: [PATCH 53/61] lint --- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 9763ab3..9383be9 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -496,9 +496,7 @@ class DynamicTableRegionMixin(BaseModel): def __getitem__(self, item: int) -> pd.DataFrame: ... @overload - def __getitem__( - self, item: Union[slice, Iterable] - ) -> List[pd.DataFrame]: ... + def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ... 
def __getitem__( self, item: Union[int, slice, Iterable] From 54409c7b280aedc4205641075a93483eee418370 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 14 Aug 2024 22:27:01 -0700 Subject: [PATCH 54/61] codespell istg --- nwb_linkml/tests/test_includes/test_hdmf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index 420a5ae..7e34c66 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -153,7 +153,7 @@ def test_dynamictable_mixin_colnames_ordered(): assert all([key1 == key2 for key1, key2 in zip(order, inst._columns)]) assert all(inst[0].columns == order) - # partial lists should append unnamed columsn at the end + # partial lists should append unnamed columns at the end partial_order = ["new_col_3", "new_col_2"] inst = MyDT(**cols, colnames=partial_order) assert inst.colnames == [*partial_order, "existing_col", "new_col_1"] From 7e7cbc1ac16f06e4f2528308aecb669219b1e75d Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Wed, 14 Aug 2024 23:03:03 -0700 Subject: [PATCH 55/61] fix setattr for index and data --- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 37 +++++++++++++++------ nwb_linkml/tests/test_includes/test_hdmf.py | 23 +++++++++++-- 2 files changed, 46 insertions(+), 14 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 9383be9..82022de 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -65,10 +65,6 @@ class DynamicTableMixin(BaseModel): def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - @overload def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... 
@@ -283,7 +279,7 @@ class DynamicTableMixin(BaseModel):
                     model[key] = VectorIndex(name=key, description="", value=val)
                 else:
                     model[key] = VectorData(name=key, description="", value=val)
-            except ValidationError as e:
+            except ValidationError as e:  # pragma: no cover
                 raise ValidationError(
                     f"field {key} cannot be cast to VectorData from {val}"
                 ) from e
@@ -423,24 +419,24 @@ class VectorIndexMixin(BaseModel, Generic[T]):
             kwargs["value"] = value
         super().__init__(**kwargs)
 
-    def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
+    def _slice(self, arg: int) -> slice:
         """
         Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
         """
         start = 0 if arg == 0 else self.value[arg - 1]
         end = self.value[arg]
-        return self.target.value[slice(start, end)]
+        return slice(start, end)
 
     def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
         if self.target is None:
             return self.value[item]
         else:
             if isinstance(item, (int, np.integer)):
-                return self._getitem_helper(item)
+                return self.target.value[self._slice(item)]
             elif isinstance(item, (slice, Iterable)):
                 if isinstance(item, slice):
                     item = range(*item.indices(len(self.value)))
-                return [self._getitem_helper(i) for i in item]
+                return [self.target.value[self._slice(i)] for i in item]
             else:
                 raise AttributeError(f"Could not index with {item}")
 
@@ -458,8 +454,27 @@ class VectorIndexMixin(BaseModel, Generic[T]):
         """
 
         if self.target:
-            # __getitem__ will return the indexed reference to the target
-            self[key] = value
+            if isinstance(key, (int, np.integer)):
+                self.target.value[self._slice(key)] = value
+            elif isinstance(key, (slice, Iterable)):
+                if isinstance(key, slice):
+                    key = range(*key.indices(len(self.value)))
+
+                if isinstance(value, Iterable):
+                    if len(key) != len(value):
+                        raise ValueError(
+                            "Can only assign equal-length iterable to a slice, manually index the"
+                            " ragged values of the target VectorData object if you need more"
+                            " control"
+                        )
+                    for i, subval in zip(key, value):
+                        self.target.value[self._slice(i)] = subval
+                else:
+                    for i in key:
+                        self.target.value[self._slice(i)] = value
+            else:  # pragma: no cover
+                raise AttributeError(f"Could not index with {key}")
+
         else:
             self.value[key] = value
 
diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py
index 7e34c66..32a4e5f 100644
--- a/nwb_linkml/tests/test_includes/test_hdmf.py
+++ b/nwb_linkml/tests/test_includes/test_hdmf.py
@@ -241,7 +241,7 @@ def test_dynamictable_resolve_index():
     assert inst.new_col_2._index is inst.new_col_2_index
 
 
-def dynamictable_assert_equal_length():
+def test_dynamictable_assert_equal_length():
     """
     Dynamictable validates that columns are of equal length
     """
@@ -277,7 +277,7 @@ def dynamictable_assert_equal_length():
         "new_col_1": hdmf.VectorData(value=np.arange(100)),
         "new_col_1_index": hdmf.VectorIndex(value=np.arange(0, 100, 5) + 5),
     }
-    with pytest.raises(ValidationError, pattern="Columns are not of equal length"):
+    with pytest.raises(ValidationError, match="Columns are not of equal length"):
         _ = MyDT(**cols)
 
 
@@ -324,6 +324,15 @@ def test_vectordata_indexing():
     assert data[0] == 1
     data[0] = 0
 
+    # an index with no target indexes (and assigns) against its own values
+    index_notarget = hdmf.VectorIndex(value=index_array)
+    assert index_notarget[0] == index_array[0]
+    assert all(index_notarget[0:3] == index_array[0:3])
+    oldval = index_array[0]
+    index_notarget[0] = 5
+    assert index_notarget[0] == 5
+    index_notarget[0] = oldval
+
     index = hdmf.VectorIndex(value=index_array, target=data)
     data._index = index
 
@@ -338,8 +347,16 @@ def 
test_vectordata_indexing(): assert all(subitem == i) # setting uses the same indexing logic - data[0][:] = 5 + data[0] = 5 assert all(data[0] == 5) + data[0:3] = [5, 4, 3] + assert all(data[0] == 5) + assert all(data[1] == 4) + assert all(data[2] == 3) + data[0:3] = 6 + assert all(data[0] == 6) + assert all(data[1] == 6) + assert all(data[2] == 6) def test_vectordata_getattr(): From ce096db349a0c0e29036b51d109bb24934cfa070 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 15 Aug 2024 00:57:44 -0700 Subject: [PATCH 56/61] aligned dynamictable tests --- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 150 ++++++++++++++++---- nwb_linkml/tests/test_includes/test_hdmf.py | 109 +++++++++++++- 2 files changed, 233 insertions(+), 26 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 82022de..0c4e7ce 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -33,7 +33,7 @@ from pydantic import ( model_validator, ) -if TYPE_CHECKING: +if TYPE_CHECKING: # pragma: no cover from nwb_linkml.models import VectorData, VectorIndex T = TypeVar("T", bound=NDArray) @@ -211,6 +211,8 @@ class DynamicTableMixin(BaseModel): """ Create ID column if not provided """ + if not isinstance(model, dict): + return model if "id" not in model: lengths = [] for key, val in model.items(): @@ -235,6 +237,8 @@ class DynamicTableMixin(BaseModel): the model dict is ordered after python3.6, so we can use that minus anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ + if not isinstance(model, dict): + return model if "colnames" not in model: colnames = [ k @@ -270,19 +274,21 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in model.items(): - if key in cls.model_fields: - continue - if not isinstance(val, (VectorData, VectorIndex)): - try: - if key.endswith("_index"): - model[key] = VectorIndex(name=key, description="", value=val) - else: - model[key] = VectorData(name=key, description="", value=val) - except ValidationError as e: # pragma: no cover - raise ValidationError( - f"field {key} cannot be cast to VectorData from {val}" - ) from e + + if isinstance(model, dict): + for key, val in model.items(): + if key in cls.model_fields: + continue + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + model[key] = VectorIndex(name=key, description="", value=val) + else: + model[key] = VectorData(name=key, description="", value=val) + except ValidationError as e: # pragma: no cover + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e return model @model_validator(mode="after") @@ -437,7 +443,7 @@ class VectorIndexMixin(BaseModel, Generic[T]): if isinstance(item, slice): item = range(*item.indices(len(self.value))) return [self.target.value[self._slice(i)] for i in item] - else: + else: # pragma: no cover raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: @@ -530,7 +536,7 @@ class DynamicTableRegionMixin(BaseModel): # so we index table with an array to construct # a list of lists of rows return [self.table[idx] for idx in self._index[item]] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: if 
isinstance(item, (int, np.integer)): @@ -543,19 +549,26 @@ class DynamicTableRegionMixin(BaseModel): if isinstance(item, slice): item = range(*item.indices(len(self.value))) return [self.table[self.value[i]] for i in item] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[self.value[key]] = value + # self.table[self.value[key]] = value + raise NotImplementedError( + "Assigning values to tables is not implemented yet!" + ) # pragma: no cover -class AlignedDynamicTableMixin(DynamicTableMixin): +class AlignedDynamicTableMixin(BaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID + + A great deal of code duplication because we need to avoid diamond inheritance + and also it's not so easy to copy a pydantic validator method. """ - __pydantic_extra__: Dict[str, "DynamicTableMixin"] + model_config = ConfigDict(extra="allow", validate_assignment=True) + __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]] NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( "name", @@ -573,7 +586,7 @@ class AlignedDynamicTableMixin(DynamicTableMixin): return {k: getattr(self, k) for i, k in enumerate(self.categories)} def __getitem__( - self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]] ) -> pd.DataFrame: """ Mimic hdmf: @@ -591,25 +604,78 @@ class AlignedDynamicTableMixin(DynamicTableMixin): elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): # get a slice of a single table return self._categories[item[1]][item[0]] - elif isinstance(item, (int, slice)): + elif isinstance(item, (int, slice, Iterable)): # get a slice of all the tables ids = self.id[item] if not isinstance(ids, Iterable): ids = pd.Series([ids]) ids = pd.DataFrame({"id": ids}) - tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + tables = [ids] + for category_name, category in self._categories.items(): + table = category[item] + if isinstance(table, pd.DataFrame): + table = table.reset_index() + elif isinstance(table, np.ndarray): + table = pd.DataFrame({category_name: [table]}) + elif isinstance(table, Iterable): + table = pd.DataFrame({category_name: table}) + else: + raise ValueError( + f"Don't know how to construct category table for {category_name}" + ) + tables.append(table) + names = [self.name] + self.categories # construct below in case we need to support array indexing in the future else: raise ValueError( f"Dont know how to index with {item}, " - "need an int, string, slice, or tuple[int | slice, str]" + "need an int, string, slice, ndarray, or tuple[int | slice, str]" ) df = pd.concat(tables, axis=1, keys=names) df.set_index((self.name, "id"), drop=True, inplace=True) return df + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:], item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use the id column to determine length. 
+ + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + + @model_validator(mode="before") + @classmethod + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_CATEGORY_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + @model_validator(mode="before") @classmethod def create_categories(cls, model: Dict[str, Any]) -> Dict: @@ -636,6 +702,42 @@ class AlignedDynamicTableMixin(DynamicTableMixin): model["categories"].extend(categories) return model + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._categories.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + if field_name in self.NON_CATEGORY_FIELDS or field_name == key: + continue + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._categories.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( + "Columns are not of equal length! 
" + f"Got colnames:\n{self.categories}\nand lengths: {lengths}" + ) + return self + class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): """ diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index 32a4e5f..fb9a3e2 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -1,4 +1,4 @@ -from typing import Optional +from typing import Optional, Type import numpy as np import pandas as pd @@ -7,7 +7,12 @@ from numpydantic import NDArray, Shape from pydantic import ValidationError from nwb_linkml.includes import hdmf -from nwb_linkml.includes.hdmf import DynamicTableMixin, VectorDataMixin, VectorIndexMixin +from nwb_linkml.includes.hdmf import ( + AlignedDynamicTableMixin, + DynamicTableMixin, + VectorDataMixin, + VectorIndexMixin, +) # FIXME: Make this just be the output of the provider by patching into import machinery from nwb_linkml.models.pydantic.core.v2_7_0.namespace import ( @@ -39,6 +44,33 @@ def basic_table() -> tuple[DynamicTableMixin, dict[str, NDArray[Shape["10"], int return MyData, cols +@pytest.fixture() +def aligned_table() -> tuple[Type[AlignedDynamicTableMixin], dict[str, DynamicTableMixin]]: + class Table1(DynamicTableMixin): + col1: hdmf.VectorData[NDArray[Shape["*"], int]] + col2: hdmf.VectorData[NDArray[Shape["*"], int]] + + class Table2(DynamicTableMixin): + col3: hdmf.VectorData[NDArray[Shape["*"], int]] + col4: hdmf.VectorData[NDArray[Shape["*"], int]] + + class Table3(DynamicTableMixin): + col5: hdmf.VectorData[NDArray[Shape["*"], int]] + col6: hdmf.VectorData[NDArray[Shape["*"], int]] + + array = np.arange(10) + + table1 = Table1(col1=array, col2=array) + table2 = Table2(col3=array, col4=array) + table3 = Table3(col5=array, col6=array) + + class AlignedTable(AlignedDynamicTableMixin): + table1: Table1 + table2: Table2 + + return AlignedTable, {"table1": table1, "table2": table2, "table3": table3} + + def test_dynamictable_mixin_indexing(basic_table): """ Can index values from a dynamictable @@ -357,6 +389,8 @@ def test_vectordata_indexing(): assert all(data[0] == 6) assert all(data[1] == 6) assert all(data[2] == 6) + with pytest.raises(ValueError, match=".*equal-length.*"): + data[0:3] = [5, 4] def test_vectordata_getattr(): @@ -506,6 +540,77 @@ def test_dynamictable_region_ragged(): assert df.equals(region[i]) +def test_aligned_dynamictable_indexing(aligned_table): + """ + Should be able to index aligned dynamic tables to yield a multi-index df + """ + AlignedTable, tables = aligned_table + atable = AlignedTable(**tables) + + row = atable[0] + assert all( + row.columns + == pd.MultiIndex.from_tuples( + [ + ("table1", "index"), + ("table1", "col1"), + ("table1", "col2"), + ("table2", "index"), + ("table2", "col3"), + ("table2", "col4"), + ("table3", "index"), + ("table3", "col5"), + ("table3", "col6"), + ] + ) + ) + for i in range(len(atable)): + vals = atable[i] + assert vals.shape == (1, 9) + assert all(vals == i) + + # mildly different, indexing with a slice. 
+ rows = atable[0:3] + for i, row in enumerate(rows.iterrows()): + vals = row[1] + assert len(vals) == 9 + assert all(vals == i) + + # index just a single table + row = atable[0:3, "table3"] + assert all(row.columns.to_numpy() == ["col5", "col6"]) + assert row.shape == (3, 2) + + # index out of order + rows = atable[np.array([0, 2, 1])] + assert all(rows.iloc[:, 0] == [0, 2, 1]) + + +def test_mixed_aligned_dynamictable(aligned_table): + """ + Aligned dynamictable should also accept vectordata/vector index pairs + """ + + AlignedTable, cols = aligned_table + value_array, index_array = _ragged_array(10) + value_array = np.concat(value_array) + + data = hdmf.VectorData(value=value_array) + index = hdmf.VectorIndex(value=index_array) + + atable = AlignedTable(**cols, extra_col=data, extra_col_index=index) + atable[0] + assert atable[0].columns[-1] == ("extra_col", "extra_col") + + for i, row in enumerate(atable[:].extra_col.iterrows()): + array = row[1].iloc[0] + assert all(array == i) + if i > 0: + assert len(array) == index_array[i] - index_array[i - 1] + else: + assert len(array) == index_array[i] + + # -------------------------------------------------- # Model-based tests # -------------------------------------------------- From 10965743ebefb1dd368eaf5bb872853f09a7da1a Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 15 Aug 2024 01:01:47 -0700 Subject: [PATCH 57/61] regenerate models --- .../hdmf_common/v1_1_0/hdmf_common_table.py | 244 ++++++++++++++---- .../hdmf_common/v1_1_2/hdmf_common_table.py | 244 ++++++++++++++---- .../hdmf_common/v1_1_3/hdmf_common_table.py | 244 ++++++++++++++---- .../hdmf_common/v1_2_0/hdmf_common_table.py | 244 ++++++++++++++---- .../hdmf_common/v1_2_1/hdmf_common_table.py | 244 ++++++++++++++---- .../hdmf_common/v1_3_0/hdmf_common_table.py | 244 ++++++++++++++---- .../hdmf_common/v1_4_0/hdmf_common_table.py | 244 ++++++++++++++---- .../hdmf_common/v1_5_0/hdmf_common_table.py | 244 ++++++++++++++---- .../hdmf_common/v1_5_1/hdmf_common_table.py | 244 ++++++++++++++---- .../hdmf_common/v1_6_0/hdmf_common_table.py | 244 ++++++++++++++---- .../hdmf_common/v1_7_0/hdmf_common_table.py | 244 ++++++++++++++---- .../hdmf_common/v1_8_0/hdmf_common_table.py | 244 ++++++++++++++---- 12 files changed, 2400 insertions(+), 528 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index e37cea0..f571cc5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -148,31 +148,62 @@ class VectorIndexMixin(BaseModel, Generic[T]): kwargs["value"] = value super().__init__(**kwargs) - def _getitem_helper(self, arg: int) -> Union[list, NDArray]: + def _slice(self, arg: int) -> slice: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.value[slice(start, end)] + return slice(start, end) def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] else: if isinstance(item, (int, np.integer)): - return self._getitem_helper(item) + return self.target.value[self._slice(item)] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): item = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in item] - else: + return 
[self.target.value[self._slice(i)] for i in item] + else: # pragma: no cover raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value + """ + Set a value on the :attr:`.target` . + + .. note:: + + Even though we correct the indexing logic from HDMF where the + _data_ is the thing that is provided by the API when one accesses + table.data (rather than table.data_index as hdmf does), + we will set to the target here (rather than to the index) + to be consistent. To modify the index, modify `self.value` directly + + """ + if self.target: + if isinstance(key, (int, np.integer)): + self.target.value[self._slice(key)] = value + elif isinstance(key, (slice, Iterable)): + if isinstance(key, slice): + key = range(*key.indices(len(self.value))) + + if isinstance(value, Iterable): + if len(key) != len(value): + raise ValueError( + "Can only assign equal-length iterable to a slice, manually index the" + " ragged values of of the target VectorData object if you need more" + " control" + ) + for i, subval in zip(key, value): + self.target.value[self._slice(i)] = subval + else: + for i in key: + self.target.value[self._slice(i)] = value + else: # pragma: no cover + raise AttributeError(f"Could not index with {key}") + else: self.value[key] = value @@ -203,9 +234,17 @@ class DynamicTableRegionMixin(BaseModel): _index: Optional["VectorIndex"] = None table: "DynamicTableMixin" - value: Optional[NDArray] = None + value: Optional[NDArray[Shape["*"], int]] = None - def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + @overload + def __getitem__(self, item: int) -> pd.DataFrame: ... + + @overload + def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ... + + def __getitem__( + self, item: Union[int, slice, Iterable] + ) -> Union[pd.DataFrame, List[pd.DataFrame]]: """ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite this being a subclass of ``VectorData`` @@ -220,20 +259,27 @@ class DynamicTableRegionMixin(BaseModel): # so we index table with an array to construct # a list of lists of rows return [self.table[idx] for idx in self._index[item]] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): + # Return a list of dataframe rows because this is most often used + # as a column in a DynamicTable, so while it would normally be + # ideal to just return the slice as above as a single df, + # we need each row to be separate to fill the column if isinstance(item, slice): item = range(*item.indices(len(self.value))) return [self.table[self.value[i]] for i in item] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[self.value[key]] = value + # self.table[self.value[key]] = value + raise NotImplementedError( + "Assigning values to tables is not implemented yet!" 
+ ) # pragma: no cover class DynamicTableMixin(BaseModel): @@ -244,9 +290,10 @@ class DynamicTableMixin(BaseModel): but simplifying along the way :) """ - model_config = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow", validate_assignment=True) __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "id", "name", "colnames", "description", @@ -260,10 +307,6 @@ class DynamicTableMixin(BaseModel): def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - @overload def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @@ -312,6 +355,7 @@ class DynamicTableMixin(BaseModel): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): data = self._slice_range(item) + index = self.id[item] elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -329,11 +373,15 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) + index = self.id[rows] else: raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return pd.DataFrame(data) + if not isinstance(index, Iterable): + index = [index] + index = pd.Index(data=index) + return pd.DataFrame(data, index=index) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -345,31 +393,40 @@ class DynamicTableMixin(BaseModel): data = {} for k in cols: if isinstance(rows, np.ndarray): + # help wanted - this is probably cr*zy slow val = [self._columns[k][i] for i in rows] else: val = self._columns[k][rows] # scalars need to be wrapped in series for pandas + # do this by the iterability of the rows index not the value because + # we want all lengths from this method to be equal, and if the rows are + # scalar, that means length == 1 if not isinstance(rows, (Iterable, slice)): - val = pd.Series([val]) + val = [val] data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") + raise NotImplementedError("TODO") # pragma: no cover def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): """ Add a column, appending it to ``colnames`` """ # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): + if not getattr(self, "__pydantic_complete__", False): # pragma: no cover return super().__setattr__(key, value) if key not in self.model_fields_set and not key.endswith("_index"): self.colnames.append(key) + # we get a recursion error if we setattr without having first added to + # extras if we need it to be there + if key not in self.model_fields and key not in self.__pydantic_extra__: + self.__pydantic_extra__[key] = value + return super().__setattr__(key, value) def __getattr__(self, item: str) -> Any: @@ -396,6 +453,8 @@ class DynamicTableMixin(BaseModel): """ Create ID column if not provided """ + if not isinstance(model, dict): + return model if "id" not in model: lengths = [] for key, val in model.items(): @@ -420,6 +479,8 @@ class DynamicTableMixin(BaseModel): the model dict is ordered after python3.6, so we can use that minus anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ + if not 
isinstance(model, dict): + return model if "colnames" not in model: colnames = [ k @@ -455,19 +516,21 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in model.items(): - if key in cls.model_fields: - continue - if not isinstance(val, (VectorData, VectorIndex)): - try: - if key.endswith("_index"): - model[key] = VectorIndex(name=key, description="", value=val) - else: - model[key] = VectorData(name=key, description="", value=val) - except ValidationError as e: - raise ValidationError( - f"field {key} cannot be cast to VectorData from {val}" - ) from e + + if isinstance(model, dict): + for key, val in model.items(): + if key in cls.model_fields: + continue + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + model[key] = VectorIndex(name=key, description="", value=val) + else: + model[key] = VectorData(name=key, description="", value=val) + except ValidationError as e: # pragma: no cover + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e return model @model_validator(mode="after") @@ -499,8 +562,8 @@ class DynamicTableMixin(BaseModel): """ Ensure that all columns are equal length """ - lengths = [len(v) for v in self._columns.values()] - assert [length == lengths[0] for length in lengths], ( + lengths = [len(v) for v in self._columns.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -536,15 +599,19 @@ class DynamicTableMixin(BaseModel): ) ) except Exception: - raise e + raise e from None -class AlignedDynamicTableMixin(DynamicTableMixin): +class AlignedDynamicTableMixin(BaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID + + A great deal of code duplication because we need to avoid diamond inheritance + and also it's not so easy to copy a pydantic validator method. 
""" - __pydantic_extra__: Dict[str, "DynamicTableMixin"] + model_config = ConfigDict(extra="allow", validate_assignment=True) + __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]] NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( "name", @@ -562,7 +629,7 @@ class AlignedDynamicTableMixin(DynamicTableMixin): return {k: getattr(self, k) for i, k in enumerate(self.categories)} def __getitem__( - self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]] ) -> pd.DataFrame: """ Mimic hdmf: @@ -580,25 +647,78 @@ class AlignedDynamicTableMixin(DynamicTableMixin): elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): # get a slice of a single table return self._categories[item[1]][item[0]] - elif isinstance(item, (int, slice)): + elif isinstance(item, (int, slice, Iterable)): # get a slice of all the tables ids = self.id[item] if not isinstance(ids, Iterable): ids = pd.Series([ids]) ids = pd.DataFrame({"id": ids}) - tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + tables = [ids] + for category_name, category in self._categories.items(): + table = category[item] + if isinstance(table, pd.DataFrame): + table = table.reset_index() + elif isinstance(table, np.ndarray): + table = pd.DataFrame({category_name: [table]}) + elif isinstance(table, Iterable): + table = pd.DataFrame({category_name: table}) + else: + raise ValueError( + f"Don't know how to construct category table for {category_name}" + ) + tables.append(table) + names = [self.name] + self.categories # construct below in case we need to support array indexing in the future else: raise ValueError( f"Dont know how to index with {item}, " - "need an int, string, slice, or tuple[int | slice, str]" + "need an int, string, slice, ndarray, or tuple[int | slice, str]" ) df = pd.concat(tables, axis=1, keys=names) df.set_index((self.name, "id"), drop=True, inplace=True) return df + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:], item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use the id column to determine length. 
+ + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + + @model_validator(mode="before") + @classmethod + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_CATEGORY_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + @model_validator(mode="before") @classmethod def create_categories(cls, model: Dict[str, Any]) -> Dict: @@ -625,6 +745,42 @@ class AlignedDynamicTableMixin(DynamicTableMixin): model["categories"].extend(categories) return model + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._categories.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + if field_name in self.NON_CATEGORY_FIELDS or field_name == key: + continue + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._categories.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( + "Columns are not of equal length! " + f"Got colnames:\n{self.categories}\nand lengths: {lengths}" + ) + return self + linkml_meta = LinkMLMeta( { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 128e62f..17128ad 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -148,31 +148,62 @@ class VectorIndexMixin(BaseModel, Generic[T]): kwargs["value"] = value super().__init__(**kwargs) - def _getitem_helper(self, arg: int) -> Union[list, NDArray]: + def _slice(self, arg: int) -> slice: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.value[slice(start, end)] + return slice(start, end) def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] else: if isinstance(item, (int, np.integer)): - return self._getitem_helper(item) + return self.target.value[self._slice(item)] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): item = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in item] - else: + return [self.target.value[self._slice(i)] for i in item] + else: # pragma: no cover raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value + """ + Set a value on the :attr:`.target` . + + .. 
note:: + + Even though we correct the indexing logic from HDMF where the + _data_ is the thing that is provided by the API when one accesses + table.data (rather than table.data_index as hdmf does), + we will set to the target here (rather than to the index) + to be consistent. To modify the index, modify `self.value` directly + + """ + if self.target: + if isinstance(key, (int, np.integer)): + self.target.value[self._slice(key)] = value + elif isinstance(key, (slice, Iterable)): + if isinstance(key, slice): + key = range(*key.indices(len(self.value))) + + if isinstance(value, Iterable): + if len(key) != len(value): + raise ValueError( + "Can only assign equal-length iterable to a slice, manually index the" + " ragged values of of the target VectorData object if you need more" + " control" + ) + for i, subval in zip(key, value): + self.target.value[self._slice(i)] = subval + else: + for i in key: + self.target.value[self._slice(i)] = value + else: # pragma: no cover + raise AttributeError(f"Could not index with {key}") + else: self.value[key] = value @@ -203,9 +234,17 @@ class DynamicTableRegionMixin(BaseModel): _index: Optional["VectorIndex"] = None table: "DynamicTableMixin" - value: Optional[NDArray] = None + value: Optional[NDArray[Shape["*"], int]] = None - def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + @overload + def __getitem__(self, item: int) -> pd.DataFrame: ... + + @overload + def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ... + + def __getitem__( + self, item: Union[int, slice, Iterable] + ) -> Union[pd.DataFrame, List[pd.DataFrame]]: """ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite this being a subclass of ``VectorData`` @@ -220,20 +259,27 @@ class DynamicTableRegionMixin(BaseModel): # so we index table with an array to construct # a list of lists of rows return [self.table[idx] for idx in self._index[item]] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): + # Return a list of dataframe rows because this is most often used + # as a column in a DynamicTable, so while it would normally be + # ideal to just return the slice as above as a single df, + # we need each row to be separate to fill the column if isinstance(item, slice): item = range(*item.indices(len(self.value))) return [self.table[self.value[i]] for i in item] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[self.value[key]] = value + # self.table[self.value[key]] = value + raise NotImplementedError( + "Assigning values to tables is not implemented yet!" 
+ ) # pragma: no cover class DynamicTableMixin(BaseModel): @@ -244,9 +290,10 @@ class DynamicTableMixin(BaseModel): but simplifying along the way :) """ - model_config = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow", validate_assignment=True) __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "id", "name", "colnames", "description", @@ -260,10 +307,6 @@ class DynamicTableMixin(BaseModel): def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - @overload def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @@ -312,6 +355,7 @@ class DynamicTableMixin(BaseModel): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): data = self._slice_range(item) + index = self.id[item] elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -329,11 +373,15 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) + index = self.id[rows] else: raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return pd.DataFrame(data) + if not isinstance(index, Iterable): + index = [index] + index = pd.Index(data=index) + return pd.DataFrame(data, index=index) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -345,31 +393,40 @@ class DynamicTableMixin(BaseModel): data = {} for k in cols: if isinstance(rows, np.ndarray): + # help wanted - this is probably cr*zy slow val = [self._columns[k][i] for i in rows] else: val = self._columns[k][rows] # scalars need to be wrapped in series for pandas + # do this by the iterability of the rows index not the value because + # we want all lengths from this method to be equal, and if the rows are + # scalar, that means length == 1 if not isinstance(rows, (Iterable, slice)): - val = pd.Series([val]) + val = [val] data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") + raise NotImplementedError("TODO") # pragma: no cover def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): """ Add a column, appending it to ``colnames`` """ # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): + if not getattr(self, "__pydantic_complete__", False): # pragma: no cover return super().__setattr__(key, value) if key not in self.model_fields_set and not key.endswith("_index"): self.colnames.append(key) + # we get a recursion error if we setattr without having first added to + # extras if we need it to be there + if key not in self.model_fields and key not in self.__pydantic_extra__: + self.__pydantic_extra__[key] = value + return super().__setattr__(key, value) def __getattr__(self, item: str) -> Any: @@ -396,6 +453,8 @@ class DynamicTableMixin(BaseModel): """ Create ID column if not provided """ + if not isinstance(model, dict): + return model if "id" not in model: lengths = [] for key, val in model.items(): @@ -420,6 +479,8 @@ class DynamicTableMixin(BaseModel): the model dict is ordered after python3.6, so we can use that minus anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ + if not 
isinstance(model, dict): + return model if "colnames" not in model: colnames = [ k @@ -455,19 +516,21 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in model.items(): - if key in cls.model_fields: - continue - if not isinstance(val, (VectorData, VectorIndex)): - try: - if key.endswith("_index"): - model[key] = VectorIndex(name=key, description="", value=val) - else: - model[key] = VectorData(name=key, description="", value=val) - except ValidationError as e: - raise ValidationError( - f"field {key} cannot be cast to VectorData from {val}" - ) from e + + if isinstance(model, dict): + for key, val in model.items(): + if key in cls.model_fields: + continue + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + model[key] = VectorIndex(name=key, description="", value=val) + else: + model[key] = VectorData(name=key, description="", value=val) + except ValidationError as e: # pragma: no cover + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e return model @model_validator(mode="after") @@ -499,8 +562,8 @@ class DynamicTableMixin(BaseModel): """ Ensure that all columns are equal length """ - lengths = [len(v) for v in self._columns.values()] - assert [length == lengths[0] for length in lengths], ( + lengths = [len(v) for v in self._columns.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -536,15 +599,19 @@ class DynamicTableMixin(BaseModel): ) ) except Exception: - raise e + raise e from None -class AlignedDynamicTableMixin(DynamicTableMixin): +class AlignedDynamicTableMixin(BaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID + + A great deal of code duplication because we need to avoid diamond inheritance + and also it's not so easy to copy a pydantic validator method. 
""" - __pydantic_extra__: Dict[str, "DynamicTableMixin"] + model_config = ConfigDict(extra="allow", validate_assignment=True) + __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]] NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( "name", @@ -562,7 +629,7 @@ class AlignedDynamicTableMixin(DynamicTableMixin): return {k: getattr(self, k) for i, k in enumerate(self.categories)} def __getitem__( - self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]] ) -> pd.DataFrame: """ Mimic hdmf: @@ -580,25 +647,78 @@ class AlignedDynamicTableMixin(DynamicTableMixin): elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): # get a slice of a single table return self._categories[item[1]][item[0]] - elif isinstance(item, (int, slice)): + elif isinstance(item, (int, slice, Iterable)): # get a slice of all the tables ids = self.id[item] if not isinstance(ids, Iterable): ids = pd.Series([ids]) ids = pd.DataFrame({"id": ids}) - tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + tables = [ids] + for category_name, category in self._categories.items(): + table = category[item] + if isinstance(table, pd.DataFrame): + table = table.reset_index() + elif isinstance(table, np.ndarray): + table = pd.DataFrame({category_name: [table]}) + elif isinstance(table, Iterable): + table = pd.DataFrame({category_name: table}) + else: + raise ValueError( + f"Don't know how to construct category table for {category_name}" + ) + tables.append(table) + names = [self.name] + self.categories # construct below in case we need to support array indexing in the future else: raise ValueError( f"Dont know how to index with {item}, " - "need an int, string, slice, or tuple[int | slice, str]" + "need an int, string, slice, ndarray, or tuple[int | slice, str]" ) df = pd.concat(tables, axis=1, keys=names) df.set_index((self.name, "id"), drop=True, inplace=True) return df + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:], item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use the id column to determine length. 
+ + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + + @model_validator(mode="before") + @classmethod + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_CATEGORY_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + @model_validator(mode="before") @classmethod def create_categories(cls, model: Dict[str, Any]) -> Dict: @@ -625,6 +745,42 @@ class AlignedDynamicTableMixin(DynamicTableMixin): model["categories"].extend(categories) return model + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._categories.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + if field_name in self.NON_CATEGORY_FIELDS or field_name == key: + continue + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._categories.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( + "Columns are not of equal length! " + f"Got colnames:\n{self.categories}\nand lengths: {lengths}" + ) + return self + linkml_meta = LinkMLMeta( { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index 12b84b1..8b25e7d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -148,31 +148,62 @@ class VectorIndexMixin(BaseModel, Generic[T]): kwargs["value"] = value super().__init__(**kwargs) - def _getitem_helper(self, arg: int) -> Union[list, NDArray]: + def _slice(self, arg: int) -> slice: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.value[slice(start, end)] + return slice(start, end) def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] else: if isinstance(item, (int, np.integer)): - return self._getitem_helper(item) + return self.target.value[self._slice(item)] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): item = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in item] - else: + return [self.target.value[self._slice(i)] for i in item] + else: # pragma: no cover raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value + """ + Set a value on the :attr:`.target` . + + .. 
note:: + + Even though we correct the indexing logic from HDMF where the + _data_ is the thing that is provided by the API when one accesses + table.data (rather than table.data_index as hdmf does), + we will set to the target here (rather than to the index) + to be consistent. To modify the index, modify `self.value` directly + + """ + if self.target: + if isinstance(key, (int, np.integer)): + self.target.value[self._slice(key)] = value + elif isinstance(key, (slice, Iterable)): + if isinstance(key, slice): + key = range(*key.indices(len(self.value))) + + if isinstance(value, Iterable): + if len(key) != len(value): + raise ValueError( + "Can only assign equal-length iterable to a slice, manually index the" + " ragged values of of the target VectorData object if you need more" + " control" + ) + for i, subval in zip(key, value): + self.target.value[self._slice(i)] = subval + else: + for i in key: + self.target.value[self._slice(i)] = value + else: # pragma: no cover + raise AttributeError(f"Could not index with {key}") + else: self.value[key] = value @@ -203,9 +234,17 @@ class DynamicTableRegionMixin(BaseModel): _index: Optional["VectorIndex"] = None table: "DynamicTableMixin" - value: Optional[NDArray] = None + value: Optional[NDArray[Shape["*"], int]] = None - def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + @overload + def __getitem__(self, item: int) -> pd.DataFrame: ... + + @overload + def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ... + + def __getitem__( + self, item: Union[int, slice, Iterable] + ) -> Union[pd.DataFrame, List[pd.DataFrame]]: """ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite this being a subclass of ``VectorData`` @@ -220,20 +259,27 @@ class DynamicTableRegionMixin(BaseModel): # so we index table with an array to construct # a list of lists of rows return [self.table[idx] for idx in self._index[item]] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): + # Return a list of dataframe rows because this is most often used + # as a column in a DynamicTable, so while it would normally be + # ideal to just return the slice as above as a single df, + # we need each row to be separate to fill the column if isinstance(item, slice): item = range(*item.indices(len(self.value))) return [self.table[self.value[i]] for i in item] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[self.value[key]] = value + # self.table[self.value[key]] = value + raise NotImplementedError( + "Assigning values to tables is not implemented yet!" 
+ ) # pragma: no cover class DynamicTableMixin(BaseModel): @@ -244,9 +290,10 @@ class DynamicTableMixin(BaseModel): but simplifying along the way :) """ - model_config = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow", validate_assignment=True) __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "id", "name", "colnames", "description", @@ -260,10 +307,6 @@ class DynamicTableMixin(BaseModel): def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - @overload def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @@ -312,6 +355,7 @@ class DynamicTableMixin(BaseModel): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): data = self._slice_range(item) + index = self.id[item] elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -329,11 +373,15 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) + index = self.id[rows] else: raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return pd.DataFrame(data) + if not isinstance(index, Iterable): + index = [index] + index = pd.Index(data=index) + return pd.DataFrame(data, index=index) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -345,31 +393,40 @@ class DynamicTableMixin(BaseModel): data = {} for k in cols: if isinstance(rows, np.ndarray): + # help wanted - this is probably cr*zy slow val = [self._columns[k][i] for i in rows] else: val = self._columns[k][rows] # scalars need to be wrapped in series for pandas + # do this by the iterability of the rows index not the value because + # we want all lengths from this method to be equal, and if the rows are + # scalar, that means length == 1 if not isinstance(rows, (Iterable, slice)): - val = pd.Series([val]) + val = [val] data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") + raise NotImplementedError("TODO") # pragma: no cover def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): """ Add a column, appending it to ``colnames`` """ # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): + if not getattr(self, "__pydantic_complete__", False): # pragma: no cover return super().__setattr__(key, value) if key not in self.model_fields_set and not key.endswith("_index"): self.colnames.append(key) + # we get a recursion error if we setattr without having first added to + # extras if we need it to be there + if key not in self.model_fields and key not in self.__pydantic_extra__: + self.__pydantic_extra__[key] = value + return super().__setattr__(key, value) def __getattr__(self, item: str) -> Any: @@ -396,6 +453,8 @@ class DynamicTableMixin(BaseModel): """ Create ID column if not provided """ + if not isinstance(model, dict): + return model if "id" not in model: lengths = [] for key, val in model.items(): @@ -420,6 +479,8 @@ class DynamicTableMixin(BaseModel): the model dict is ordered after python3.6, so we can use that minus anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ + if not 
isinstance(model, dict): + return model if "colnames" not in model: colnames = [ k @@ -455,19 +516,21 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in model.items(): - if key in cls.model_fields: - continue - if not isinstance(val, (VectorData, VectorIndex)): - try: - if key.endswith("_index"): - model[key] = VectorIndex(name=key, description="", value=val) - else: - model[key] = VectorData(name=key, description="", value=val) - except ValidationError as e: - raise ValidationError( - f"field {key} cannot be cast to VectorData from {val}" - ) from e + + if isinstance(model, dict): + for key, val in model.items(): + if key in cls.model_fields: + continue + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + model[key] = VectorIndex(name=key, description="", value=val) + else: + model[key] = VectorData(name=key, description="", value=val) + except ValidationError as e: # pragma: no cover + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e return model @model_validator(mode="after") @@ -499,8 +562,8 @@ class DynamicTableMixin(BaseModel): """ Ensure that all columns are equal length """ - lengths = [len(v) for v in self._columns.values()] - assert [length == lengths[0] for length in lengths], ( + lengths = [len(v) for v in self._columns.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -536,15 +599,19 @@ class DynamicTableMixin(BaseModel): ) ) except Exception: - raise e + raise e from None -class AlignedDynamicTableMixin(DynamicTableMixin): +class AlignedDynamicTableMixin(BaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID + + A great deal of code duplication because we need to avoid diamond inheritance + and also it's not so easy to copy a pydantic validator method. 
""" - __pydantic_extra__: Dict[str, "DynamicTableMixin"] + model_config = ConfigDict(extra="allow", validate_assignment=True) + __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]] NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( "name", @@ -562,7 +629,7 @@ class AlignedDynamicTableMixin(DynamicTableMixin): return {k: getattr(self, k) for i, k in enumerate(self.categories)} def __getitem__( - self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]] ) -> pd.DataFrame: """ Mimic hdmf: @@ -580,25 +647,78 @@ class AlignedDynamicTableMixin(DynamicTableMixin): elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): # get a slice of a single table return self._categories[item[1]][item[0]] - elif isinstance(item, (int, slice)): + elif isinstance(item, (int, slice, Iterable)): # get a slice of all the tables ids = self.id[item] if not isinstance(ids, Iterable): ids = pd.Series([ids]) ids = pd.DataFrame({"id": ids}) - tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + tables = [ids] + for category_name, category in self._categories.items(): + table = category[item] + if isinstance(table, pd.DataFrame): + table = table.reset_index() + elif isinstance(table, np.ndarray): + table = pd.DataFrame({category_name: [table]}) + elif isinstance(table, Iterable): + table = pd.DataFrame({category_name: table}) + else: + raise ValueError( + f"Don't know how to construct category table for {category_name}" + ) + tables.append(table) + names = [self.name] + self.categories # construct below in case we need to support array indexing in the future else: raise ValueError( f"Dont know how to index with {item}, " - "need an int, string, slice, or tuple[int | slice, str]" + "need an int, string, slice, ndarray, or tuple[int | slice, str]" ) df = pd.concat(tables, axis=1, keys=names) df.set_index((self.name, "id"), drop=True, inplace=True) return df + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:], item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use the id column to determine length. 
+
+        If the id column doesn't represent length accurately, it's a bug
+        """
+        return len(self.id)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_id(cls, model: Dict[str, Any]) -> Dict:
+        """
+        Create ID column if not provided
+        """
+        if "id" not in model:
+            lengths = []
+            for key, val in model.items():
+                # don't get lengths of columns with an index
+                if (
+                    f"{key}_index" in model
+                    or (isinstance(val, VectorData) and val._index)
+                    or key in cls.NON_CATEGORY_FIELDS
+                ):
+                    continue
+                lengths.append(len(val))
+            model["id"] = np.arange(np.max(lengths))
+
+        return model
+
     @model_validator(mode="before")
     @classmethod
     def create_categories(cls, model: Dict[str, Any]) -> Dict:
@@ -625,6 +745,42 @@ class AlignedDynamicTableMixin(DynamicTableMixin):
             model["categories"].extend(categories)
         return model
 
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "AlignedDynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._categories.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+                        continue
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex) and (
+                        field_name == f"{key}_index" or field.target is col
+                    ):
+                        idx = field
+                        break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
+
+    @model_validator(mode="after")
+    def ensure_equal_length_cols(self) -> "AlignedDynamicTableMixin":
+        """
+        Ensure that all columns are equal length
+        """
+        lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+        assert all([length == lengths[0] for length in lengths]), (
+            "Columns are not of equal length! "
+            f"Got categories:\n{self.categories}\nand lengths: {lengths}"
+        )
+        return self
+
 
 linkml_meta = LinkMLMeta(
     {
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py
index 9c99479..561d242 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_0/hdmf_common_table.py
@@ -149,31 +149,62 @@ class VectorIndexMixin(BaseModel, Generic[T]):
         kwargs["value"] = value
         super().__init__(**kwargs)
 
-    def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
+    def _slice(self, arg: int) -> slice:
         """
         Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
         """
         start = 0 if arg == 0 else self.value[arg - 1]
         end = self.value[arg]
-        return self.target.value[slice(start, end)]
+        return slice(start, end)
 
     def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
         if self.target is None:
             return self.value[item]
         else:
             if isinstance(item, (int, np.integer)):
-                return self._getitem_helper(item)
+                return self.target.value[self._slice(item)]
             elif isinstance(item, (slice, Iterable)):
                 if isinstance(item, slice):
                     item = range(*item.indices(len(self.value)))
-                return [self._getitem_helper(i) for i in item]
-            else:
+                return [self.target.value[self._slice(i)] for i in item]
+            else:  # pragma: no cover
                 raise AttributeError(f"Could not index with {item}")
 
     def __setitem__(self, key: Union[int, slice], value: Any) -> None:
-        if self._index:
-            # VectorIndex is the thing that knows how to do the slicing
-            self._index[key] = value
+        """
+        Set a value on the :attr:`.target`.
+
+        .. note::
+
+            Even though we correct the indexing logic from HDMF where the
+            _data_ is the thing that is provided by the API when one accesses
+            table.data (rather than table.data_index as hdmf does),
+            we will set to the target here (rather than to the index)
+            to be consistent. To modify the index, modify `self.value` directly.
+
+        """
+        if self.target:
+            if isinstance(key, (int, np.integer)):
+                self.target.value[self._slice(key)] = value
+            elif isinstance(key, (slice, Iterable)):
+                if isinstance(key, slice):
+                    key = range(*key.indices(len(self.value)))
+
+                if isinstance(value, Iterable):
+                    if len(key) != len(value):
+                        raise ValueError(
+                            "Can only assign equal-length iterable to a slice, manually index the"
+                            " ragged values of the target VectorData object if you need more"
+                            " control"
+                        )
+                    for i, subval in zip(key, value):
+                        self.target.value[self._slice(i)] = subval
+                else:
+                    for i in key:
+                        self.target.value[self._slice(i)] = value
+            else:  # pragma: no cover
+                raise AttributeError(f"Could not index with {key}")
+
         else:
             self.value[key] = value
 
@@ -204,9 +235,17 @@ class DynamicTableRegionMixin(BaseModel):
 
     _index: Optional["VectorIndex"] = None
     table: "DynamicTableMixin"
-    value: Optional[NDArray] = None
+    value: Optional[NDArray[Shape["*"], int]] = None
 
-    def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+    @overload
+    def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+    def __getitem__(
+        self, item: Union[int, slice, Iterable]
+    ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
         """
         Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
         this being a subclass of ``VectorData``
@@ -221,20 +260,27 @@
                 # so we index table with an array to construct
                 # a list of lists of rows
                 return [self.table[idx] for idx in self._index[item]]
-            else:
+            else:  # pragma: no cover
                 raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
         else:
             if isinstance(item, (int, np.integer)):
                 return self.table[self.value[item]]
             elif isinstance(item, (slice, Iterable)):
+                # Return a list of dataframe rows because this is most often used
+                # as a column in a DynamicTable, so while it would normally be
+                # ideal to just return the slice as above as a single df,
+                # we need each row to be separate to fill the column
                 if isinstance(item, slice):
                     item = range(*item.indices(len(self.value)))
                 return [self.table[self.value[i]] for i in item]
-            else:
+            else:  # pragma: no cover
                 raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
 
     def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
-        self.table[self.value[key]] = value
+        # self.table[self.value[key]] = value
+        raise NotImplementedError(
+            "Assigning values to tables is not implemented yet!"
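+            # A hypothetical read-side counterpart, mirroring the commented-out
+            # assignment above (write support is still TODO):
+            #     row_df = self.table[self.value[key]]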
+ ) # pragma: no cover class DynamicTableMixin(BaseModel): @@ -245,9 +291,10 @@ class DynamicTableMixin(BaseModel): but simplifying along the way :) """ - model_config = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow", validate_assignment=True) __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "id", "name", "colnames", "description", @@ -261,10 +308,6 @@ class DynamicTableMixin(BaseModel): def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - @overload def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @@ -313,6 +356,7 @@ class DynamicTableMixin(BaseModel): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): data = self._slice_range(item) + index = self.id[item] elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -330,11 +374,15 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) + index = self.id[rows] else: raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return pd.DataFrame(data) + if not isinstance(index, Iterable): + index = [index] + index = pd.Index(data=index) + return pd.DataFrame(data, index=index) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -346,31 +394,40 @@ class DynamicTableMixin(BaseModel): data = {} for k in cols: if isinstance(rows, np.ndarray): + # help wanted - this is probably cr*zy slow val = [self._columns[k][i] for i in rows] else: val = self._columns[k][rows] # scalars need to be wrapped in series for pandas + # do this by the iterability of the rows index not the value because + # we want all lengths from this method to be equal, and if the rows are + # scalar, that means length == 1 if not isinstance(rows, (Iterable, slice)): - val = pd.Series([val]) + val = [val] data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") + raise NotImplementedError("TODO") # pragma: no cover def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): """ Add a column, appending it to ``colnames`` """ # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): + if not getattr(self, "__pydantic_complete__", False): # pragma: no cover return super().__setattr__(key, value) if key not in self.model_fields_set and not key.endswith("_index"): self.colnames.append(key) + # we get a recursion error if we setattr without having first added to + # extras if we need it to be there + if key not in self.model_fields and key not in self.__pydantic_extra__: + self.__pydantic_extra__[key] = value + return super().__setattr__(key, value) def __getattr__(self, item: str) -> Any: @@ -397,6 +454,8 @@ class DynamicTableMixin(BaseModel): """ Create ID column if not provided """ + if not isinstance(model, dict): + return model if "id" not in model: lengths = [] for key, val in model.items(): @@ -421,6 +480,8 @@ class DynamicTableMixin(BaseModel): the model dict is ordered after python3.6, so we can use that minus anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ + if not 
isinstance(model, dict): + return model if "colnames" not in model: colnames = [ k @@ -456,19 +517,21 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in model.items(): - if key in cls.model_fields: - continue - if not isinstance(val, (VectorData, VectorIndex)): - try: - if key.endswith("_index"): - model[key] = VectorIndex(name=key, description="", value=val) - else: - model[key] = VectorData(name=key, description="", value=val) - except ValidationError as e: - raise ValidationError( - f"field {key} cannot be cast to VectorData from {val}" - ) from e + + if isinstance(model, dict): + for key, val in model.items(): + if key in cls.model_fields: + continue + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + model[key] = VectorIndex(name=key, description="", value=val) + else: + model[key] = VectorData(name=key, description="", value=val) + except ValidationError as e: # pragma: no cover + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e return model @model_validator(mode="after") @@ -500,8 +563,8 @@ class DynamicTableMixin(BaseModel): """ Ensure that all columns are equal length """ - lengths = [len(v) for v in self._columns.values()] - assert [length == lengths[0] for length in lengths], ( + lengths = [len(v) for v in self._columns.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -537,15 +600,19 @@ class DynamicTableMixin(BaseModel): ) ) except Exception: - raise e + raise e from None -class AlignedDynamicTableMixin(DynamicTableMixin): +class AlignedDynamicTableMixin(BaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID + + A great deal of code duplication because we need to avoid diamond inheritance + and also it's not so easy to copy a pydantic validator method. 
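+
+    A hypothetical usage sketch (``MyAlignedTable`` and the two category
+    tables are illustrative stand-ins, not schema classes)::
+
+        aligned = MyAlignedTable(
+            name="aligned",
+            description="category tables sharing a common id",
+            spikes=spikes_table,
+            waveforms=waveforms_table,
+        )
+        df = aligned[0:2]  # one MultiIndex dataframe spanning both categories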
""" - __pydantic_extra__: Dict[str, "DynamicTableMixin"] + model_config = ConfigDict(extra="allow", validate_assignment=True) + __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]] NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( "name", @@ -563,7 +630,7 @@ class AlignedDynamicTableMixin(DynamicTableMixin): return {k: getattr(self, k) for i, k in enumerate(self.categories)} def __getitem__( - self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]] ) -> pd.DataFrame: """ Mimic hdmf: @@ -581,25 +648,78 @@ class AlignedDynamicTableMixin(DynamicTableMixin): elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): # get a slice of a single table return self._categories[item[1]][item[0]] - elif isinstance(item, (int, slice)): + elif isinstance(item, (int, slice, Iterable)): # get a slice of all the tables ids = self.id[item] if not isinstance(ids, Iterable): ids = pd.Series([ids]) ids = pd.DataFrame({"id": ids}) - tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + tables = [ids] + for category_name, category in self._categories.items(): + table = category[item] + if isinstance(table, pd.DataFrame): + table = table.reset_index() + elif isinstance(table, np.ndarray): + table = pd.DataFrame({category_name: [table]}) + elif isinstance(table, Iterable): + table = pd.DataFrame({category_name: table}) + else: + raise ValueError( + f"Don't know how to construct category table for {category_name}" + ) + tables.append(table) + names = [self.name] + self.categories # construct below in case we need to support array indexing in the future else: raise ValueError( f"Dont know how to index with {item}, " - "need an int, string, slice, or tuple[int | slice, str]" + "need an int, string, slice, ndarray, or tuple[int | slice, str]" ) df = pd.concat(tables, axis=1, keys=names) df.set_index((self.name, "id"), drop=True, inplace=True) return df + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:], item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use the id column to determine length. 
+
+        If the id column doesn't represent length accurately, it's a bug
+        """
+        return len(self.id)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_id(cls, model: Dict[str, Any]) -> Dict:
+        """
+        Create ID column if not provided
+        """
+        if "id" not in model:
+            lengths = []
+            for key, val in model.items():
+                # don't get lengths of columns with an index
+                if (
+                    f"{key}_index" in model
+                    or (isinstance(val, VectorData) and val._index)
+                    or key in cls.NON_CATEGORY_FIELDS
+                ):
+                    continue
+                lengths.append(len(val))
+            model["id"] = np.arange(np.max(lengths))
+
+        return model
+
     @model_validator(mode="before")
     @classmethod
     def create_categories(cls, model: Dict[str, Any]) -> Dict:
@@ -626,6 +746,42 @@ class AlignedDynamicTableMixin(DynamicTableMixin):
             model["categories"].extend(categories)
         return model
 
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "AlignedDynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._categories.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+                        continue
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex) and (
+                        field_name == f"{key}_index" or field.target is col
+                    ):
+                        idx = field
+                        break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
+
+    @model_validator(mode="after")
+    def ensure_equal_length_cols(self) -> "AlignedDynamicTableMixin":
+        """
+        Ensure that all columns are equal length
+        """
+        lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+        assert all([length == lengths[0] for length in lengths]), (
+            "Columns are not of equal length! "
+            f"Got categories:\n{self.categories}\nand lengths: {lengths}"
+        )
+        return self
+
 
 linkml_meta = LinkMLMeta(
     {
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py
index b487609..9ff2a6c 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_2_1/hdmf_common_table.py
@@ -149,31 +149,62 @@ class VectorIndexMixin(BaseModel, Generic[T]):
         kwargs["value"] = value
         super().__init__(**kwargs)
 
-    def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
+    def _slice(self, arg: int) -> slice:
         """
         Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
         """
         start = 0 if arg == 0 else self.value[arg - 1]
         end = self.value[arg]
-        return self.target.value[slice(start, end)]
+        return slice(start, end)
 
     def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
         if self.target is None:
             return self.value[item]
         else:
             if isinstance(item, (int, np.integer)):
-                return self._getitem_helper(item)
+                return self.target.value[self._slice(item)]
             elif isinstance(item, (slice, Iterable)):
                 if isinstance(item, slice):
                     item = range(*item.indices(len(self.value)))
-                return [self._getitem_helper(i) for i in item]
-            else:
+                return [self.target.value[self._slice(i)] for i in item]
+            else:  # pragma: no cover
                 raise AttributeError(f"Could not index with {item}")
 
     def __setitem__(self, key: Union[int, slice], value: Any) -> None:
-        if self._index:
-            # VectorIndex is the thing that knows how to do the slicing
-            self._index[key] = value
+        """
+        Set a value on the :attr:`.target`.
+
+        .. note::
+
+            Even though we correct the indexing logic from HDMF where the
+            _data_ is the thing that is provided by the API when one accesses
+            table.data (rather than table.data_index as hdmf does),
+            we will set to the target here (rather than to the index)
+            to be consistent. To modify the index, modify `self.value` directly.
+
+        """
+        if self.target:
+            if isinstance(key, (int, np.integer)):
+                self.target.value[self._slice(key)] = value
+            elif isinstance(key, (slice, Iterable)):
+                if isinstance(key, slice):
+                    key = range(*key.indices(len(self.value)))
+
+                if isinstance(value, Iterable):
+                    if len(key) != len(value):
+                        raise ValueError(
+                            "Can only assign equal-length iterable to a slice, manually index the"
+                            " ragged values of the target VectorData object if you need more"
+                            " control"
+                        )
+                    for i, subval in zip(key, value):
+                        self.target.value[self._slice(i)] = subval
+                else:
+                    for i in key:
+                        self.target.value[self._slice(i)] = value
+            else:  # pragma: no cover
+                raise AttributeError(f"Could not index with {key}")
+
         else:
             self.value[key] = value
 
@@ -204,9 +235,17 @@ class DynamicTableRegionMixin(BaseModel):
 
     _index: Optional["VectorIndex"] = None
     table: "DynamicTableMixin"
-    value: Optional[NDArray] = None
+    value: Optional[NDArray[Shape["*"], int]] = None
 
-    def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+    @overload
+    def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+    def __getitem__(
+        self, item: Union[int, slice, Iterable]
+    ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
         """
         Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
         this being a subclass of ``VectorData``
@@ -221,20 +260,27 @@
                 # so we index table with an array to construct
                 # a list of lists of rows
                 return [self.table[idx] for idx in self._index[item]]
-            else:
+            else:  # pragma: no cover
                 raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
         else:
             if isinstance(item, (int, np.integer)):
                 return self.table[self.value[item]]
             elif isinstance(item, (slice, Iterable)):
+                # Return a list of dataframe rows because this is most often used
+                # as a column in a DynamicTable, so while it would normally be
+                # ideal to just return the slice as above as a single df,
+                # we need each row to be separate to fill the column
                if isinstance(item, slice):
                     item = range(*item.indices(len(self.value)))
                 return [self.table[self.value[i]] for i in item]
-            else:
+            else:  # pragma: no cover
                 raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
 
     def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
-        self.table[self.value[key]] = value
+        # self.table[self.value[key]] = value
+        raise NotImplementedError(
+            "Assigning values to tables is not implemented yet!"
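+            # A hypothetical read-side counterpart, mirroring the commented-out
+            # assignment above (write support is still TODO):
+            #     row_df = self.table[self.value[key]]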
+ ) # pragma: no cover class DynamicTableMixin(BaseModel): @@ -245,9 +291,10 @@ class DynamicTableMixin(BaseModel): but simplifying along the way :) """ - model_config = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow", validate_assignment=True) __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "id", "name", "colnames", "description", @@ -261,10 +308,6 @@ class DynamicTableMixin(BaseModel): def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - @overload def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @@ -313,6 +356,7 @@ class DynamicTableMixin(BaseModel): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): data = self._slice_range(item) + index = self.id[item] elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -330,11 +374,15 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) + index = self.id[rows] else: raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return pd.DataFrame(data) + if not isinstance(index, Iterable): + index = [index] + index = pd.Index(data=index) + return pd.DataFrame(data, index=index) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -346,31 +394,40 @@ class DynamicTableMixin(BaseModel): data = {} for k in cols: if isinstance(rows, np.ndarray): + # help wanted - this is probably cr*zy slow val = [self._columns[k][i] for i in rows] else: val = self._columns[k][rows] # scalars need to be wrapped in series for pandas + # do this by the iterability of the rows index not the value because + # we want all lengths from this method to be equal, and if the rows are + # scalar, that means length == 1 if not isinstance(rows, (Iterable, slice)): - val = pd.Series([val]) + val = [val] data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") + raise NotImplementedError("TODO") # pragma: no cover def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): """ Add a column, appending it to ``colnames`` """ # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): + if not getattr(self, "__pydantic_complete__", False): # pragma: no cover return super().__setattr__(key, value) if key not in self.model_fields_set and not key.endswith("_index"): self.colnames.append(key) + # we get a recursion error if we setattr without having first added to + # extras if we need it to be there + if key not in self.model_fields and key not in self.__pydantic_extra__: + self.__pydantic_extra__[key] = value + return super().__setattr__(key, value) def __getattr__(self, item: str) -> Any: @@ -397,6 +454,8 @@ class DynamicTableMixin(BaseModel): """ Create ID column if not provided """ + if not isinstance(model, dict): + return model if "id" not in model: lengths = [] for key, val in model.items(): @@ -421,6 +480,8 @@ class DynamicTableMixin(BaseModel): the model dict is ordered after python3.6, so we can use that minus anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ + if not 
isinstance(model, dict): + return model if "colnames" not in model: colnames = [ k @@ -456,19 +517,21 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in model.items(): - if key in cls.model_fields: - continue - if not isinstance(val, (VectorData, VectorIndex)): - try: - if key.endswith("_index"): - model[key] = VectorIndex(name=key, description="", value=val) - else: - model[key] = VectorData(name=key, description="", value=val) - except ValidationError as e: - raise ValidationError( - f"field {key} cannot be cast to VectorData from {val}" - ) from e + + if isinstance(model, dict): + for key, val in model.items(): + if key in cls.model_fields: + continue + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + model[key] = VectorIndex(name=key, description="", value=val) + else: + model[key] = VectorData(name=key, description="", value=val) + except ValidationError as e: # pragma: no cover + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e return model @model_validator(mode="after") @@ -500,8 +563,8 @@ class DynamicTableMixin(BaseModel): """ Ensure that all columns are equal length """ - lengths = [len(v) for v in self._columns.values()] - assert [length == lengths[0] for length in lengths], ( + lengths = [len(v) for v in self._columns.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -537,15 +600,19 @@ class DynamicTableMixin(BaseModel): ) ) except Exception: - raise e + raise e from None -class AlignedDynamicTableMixin(DynamicTableMixin): +class AlignedDynamicTableMixin(BaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID + + A great deal of code duplication because we need to avoid diamond inheritance + and also it's not so easy to copy a pydantic validator method. 
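+
+    A hypothetical usage sketch (``MyAlignedTable`` and the two category
+    tables are illustrative stand-ins, not schema classes)::
+
+        aligned = MyAlignedTable(
+            name="aligned",
+            description="category tables sharing a common id",
+            spikes=spikes_table,
+            waveforms=waveforms_table,
+        )
+        df = aligned[0:2]  # one MultiIndex dataframe spanning both categories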
""" - __pydantic_extra__: Dict[str, "DynamicTableMixin"] + model_config = ConfigDict(extra="allow", validate_assignment=True) + __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]] NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( "name", @@ -563,7 +630,7 @@ class AlignedDynamicTableMixin(DynamicTableMixin): return {k: getattr(self, k) for i, k in enumerate(self.categories)} def __getitem__( - self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]] ) -> pd.DataFrame: """ Mimic hdmf: @@ -581,25 +648,78 @@ class AlignedDynamicTableMixin(DynamicTableMixin): elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): # get a slice of a single table return self._categories[item[1]][item[0]] - elif isinstance(item, (int, slice)): + elif isinstance(item, (int, slice, Iterable)): # get a slice of all the tables ids = self.id[item] if not isinstance(ids, Iterable): ids = pd.Series([ids]) ids = pd.DataFrame({"id": ids}) - tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + tables = [ids] + for category_name, category in self._categories.items(): + table = category[item] + if isinstance(table, pd.DataFrame): + table = table.reset_index() + elif isinstance(table, np.ndarray): + table = pd.DataFrame({category_name: [table]}) + elif isinstance(table, Iterable): + table = pd.DataFrame({category_name: table}) + else: + raise ValueError( + f"Don't know how to construct category table for {category_name}" + ) + tables.append(table) + names = [self.name] + self.categories # construct below in case we need to support array indexing in the future else: raise ValueError( f"Dont know how to index with {item}, " - "need an int, string, slice, or tuple[int | slice, str]" + "need an int, string, slice, ndarray, or tuple[int | slice, str]" ) df = pd.concat(tables, axis=1, keys=names) df.set_index((self.name, "id"), drop=True, inplace=True) return df + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:], item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use the id column to determine length. 
+
+        If the id column doesn't represent length accurately, it's a bug
+        """
+        return len(self.id)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_id(cls, model: Dict[str, Any]) -> Dict:
+        """
+        Create ID column if not provided
+        """
+        if "id" not in model:
+            lengths = []
+            for key, val in model.items():
+                # don't get lengths of columns with an index
+                if (
+                    f"{key}_index" in model
+                    or (isinstance(val, VectorData) and val._index)
+                    or key in cls.NON_CATEGORY_FIELDS
+                ):
+                    continue
+                lengths.append(len(val))
+            model["id"] = np.arange(np.max(lengths))
+
+        return model
+
     @model_validator(mode="before")
     @classmethod
     def create_categories(cls, model: Dict[str, Any]) -> Dict:
@@ -626,6 +746,42 @@ class AlignedDynamicTableMixin(DynamicTableMixin):
             model["categories"].extend(categories)
         return model
 
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "AlignedDynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._categories.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+                        continue
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex) and (
+                        field_name == f"{key}_index" or field.target is col
+                    ):
+                        idx = field
+                        break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
+
+    @model_validator(mode="after")
+    def ensure_equal_length_cols(self) -> "AlignedDynamicTableMixin":
+        """
+        Ensure that all columns are equal length
+        """
+        lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+        assert all([length == lengths[0] for length in lengths]), (
+            "Columns are not of equal length! "
+            f"Got categories:\n{self.categories}\nand lengths: {lengths}"
+        )
+        return self
+
 
 linkml_meta = LinkMLMeta(
     {
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py
index 14214b5..1ecc2ec 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_3_0/hdmf_common_table.py
@@ -149,31 +149,62 @@ class VectorIndexMixin(BaseModel, Generic[T]):
         kwargs["value"] = value
         super().__init__(**kwargs)
 
-    def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
+    def _slice(self, arg: int) -> slice:
         """
         Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
         """
         start = 0 if arg == 0 else self.value[arg - 1]
         end = self.value[arg]
-        return self.target.value[slice(start, end)]
+        return slice(start, end)
 
     def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
         if self.target is None:
             return self.value[item]
         else:
             if isinstance(item, (int, np.integer)):
-                return self._getitem_helper(item)
+                return self.target.value[self._slice(item)]
             elif isinstance(item, (slice, Iterable)):
                 if isinstance(item, slice):
                     item = range(*item.indices(len(self.value)))
-                return [self._getitem_helper(i) for i in item]
-            else:
+                return [self.target.value[self._slice(i)] for i in item]
+            else:  # pragma: no cover
                 raise AttributeError(f"Could not index with {item}")
 
     def __setitem__(self, key: Union[int, slice], value: Any) -> None:
-        if self._index:
-            # VectorIndex is the thing that knows how to do the slicing
-            self._index[key] = value
+        """
+        Set a value on the :attr:`.target`.
+
+        .. note::
+
+            Even though we correct the indexing logic from HDMF where the
+            _data_ is the thing that is provided by the API when one accesses
+            table.data (rather than table.data_index as hdmf does),
+            we will set to the target here (rather than to the index)
+            to be consistent. To modify the index, modify `self.value` directly.
+
+        """
+        if self.target:
+            if isinstance(key, (int, np.integer)):
+                self.target.value[self._slice(key)] = value
+            elif isinstance(key, (slice, Iterable)):
+                if isinstance(key, slice):
+                    key = range(*key.indices(len(self.value)))
+
+                if isinstance(value, Iterable):
+                    if len(key) != len(value):
+                        raise ValueError(
+                            "Can only assign equal-length iterable to a slice, manually index the"
+                            " ragged values of the target VectorData object if you need more"
+                            " control"
+                        )
+                    for i, subval in zip(key, value):
+                        self.target.value[self._slice(i)] = subval
+                else:
+                    for i in key:
+                        self.target.value[self._slice(i)] = value
+            else:  # pragma: no cover
+                raise AttributeError(f"Could not index with {key}")
+
         else:
             self.value[key] = value
 
@@ -204,9 +235,17 @@ class DynamicTableRegionMixin(BaseModel):
 
     _index: Optional["VectorIndex"] = None
     table: "DynamicTableMixin"
-    value: Optional[NDArray] = None
+    value: Optional[NDArray[Shape["*"], int]] = None
 
-    def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+    @overload
+    def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+    def __getitem__(
+        self, item: Union[int, slice, Iterable]
+    ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
         """
         Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
         this being a subclass of ``VectorData``
@@ -221,20 +260,27 @@
                 # so we index table with an array to construct
                 # a list of lists of rows
                 return [self.table[idx] for idx in self._index[item]]
-            else:
+            else:  # pragma: no cover
                 raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
         else:
             if isinstance(item, (int, np.integer)):
                 return self.table[self.value[item]]
             elif isinstance(item, (slice, Iterable)):
+                # Return a list of dataframe rows because this is most often used
+                # as a column in a DynamicTable, so while it would normally be
+                # ideal to just return the slice as above as a single df,
+                # we need each row to be separate to fill the column
                 if isinstance(item, slice):
                     item = range(*item.indices(len(self.value)))
                 return [self.table[self.value[i]] for i in item]
-            else:
+            else:  # pragma: no cover
                 raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
 
     def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
-        self.table[self.value[key]] = value
+        # self.table[self.value[key]] = value
+        raise NotImplementedError(
+            "Assigning values to tables is not implemented yet!"
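+            # A hypothetical read-side counterpart, mirroring the commented-out
+            # assignment above (write support is still TODO):
+            #     row_df = self.table[self.value[key]]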
+ ) # pragma: no cover class DynamicTableMixin(BaseModel): @@ -245,9 +291,10 @@ class DynamicTableMixin(BaseModel): but simplifying along the way :) """ - model_config = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow", validate_assignment=True) __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "id", "name", "colnames", "description", @@ -261,10 +308,6 @@ class DynamicTableMixin(BaseModel): def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - @overload def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @@ -313,6 +356,7 @@ class DynamicTableMixin(BaseModel): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): data = self._slice_range(item) + index = self.id[item] elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -330,11 +374,15 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) + index = self.id[rows] else: raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return pd.DataFrame(data) + if not isinstance(index, Iterable): + index = [index] + index = pd.Index(data=index) + return pd.DataFrame(data, index=index) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -346,31 +394,40 @@ class DynamicTableMixin(BaseModel): data = {} for k in cols: if isinstance(rows, np.ndarray): + # help wanted - this is probably cr*zy slow val = [self._columns[k][i] for i in rows] else: val = self._columns[k][rows] # scalars need to be wrapped in series for pandas + # do this by the iterability of the rows index not the value because + # we want all lengths from this method to be equal, and if the rows are + # scalar, that means length == 1 if not isinstance(rows, (Iterable, slice)): - val = pd.Series([val]) + val = [val] data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") + raise NotImplementedError("TODO") # pragma: no cover def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): """ Add a column, appending it to ``colnames`` """ # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): + if not getattr(self, "__pydantic_complete__", False): # pragma: no cover return super().__setattr__(key, value) if key not in self.model_fields_set and not key.endswith("_index"): self.colnames.append(key) + # we get a recursion error if we setattr without having first added to + # extras if we need it to be there + if key not in self.model_fields and key not in self.__pydantic_extra__: + self.__pydantic_extra__[key] = value + return super().__setattr__(key, value) def __getattr__(self, item: str) -> Any: @@ -397,6 +454,8 @@ class DynamicTableMixin(BaseModel): """ Create ID column if not provided """ + if not isinstance(model, dict): + return model if "id" not in model: lengths = [] for key, val in model.items(): @@ -421,6 +480,8 @@ class DynamicTableMixin(BaseModel): the model dict is ordered after python3.6, so we can use that minus anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ + if not 
isinstance(model, dict): + return model if "colnames" not in model: colnames = [ k @@ -456,19 +517,21 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in model.items(): - if key in cls.model_fields: - continue - if not isinstance(val, (VectorData, VectorIndex)): - try: - if key.endswith("_index"): - model[key] = VectorIndex(name=key, description="", value=val) - else: - model[key] = VectorData(name=key, description="", value=val) - except ValidationError as e: - raise ValidationError( - f"field {key} cannot be cast to VectorData from {val}" - ) from e + + if isinstance(model, dict): + for key, val in model.items(): + if key in cls.model_fields: + continue + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + model[key] = VectorIndex(name=key, description="", value=val) + else: + model[key] = VectorData(name=key, description="", value=val) + except ValidationError as e: # pragma: no cover + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e return model @model_validator(mode="after") @@ -500,8 +563,8 @@ class DynamicTableMixin(BaseModel): """ Ensure that all columns are equal length """ - lengths = [len(v) for v in self._columns.values()] - assert [length == lengths[0] for length in lengths], ( + lengths = [len(v) for v in self._columns.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -537,15 +600,19 @@ class DynamicTableMixin(BaseModel): ) ) except Exception: - raise e + raise e from None -class AlignedDynamicTableMixin(DynamicTableMixin): +class AlignedDynamicTableMixin(BaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID + + A great deal of code duplication because we need to avoid diamond inheritance + and also it's not so easy to copy a pydantic validator method. 
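+
+    A hypothetical usage sketch (``MyAlignedTable`` and the two category
+    tables are illustrative stand-ins, not schema classes)::
+
+        aligned = MyAlignedTable(
+            name="aligned",
+            description="category tables sharing a common id",
+            spikes=spikes_table,
+            waveforms=waveforms_table,
+        )
+        df = aligned[0:2]  # one MultiIndex dataframe spanning both categories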
""" - __pydantic_extra__: Dict[str, "DynamicTableMixin"] + model_config = ConfigDict(extra="allow", validate_assignment=True) + __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]] NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( "name", @@ -563,7 +630,7 @@ class AlignedDynamicTableMixin(DynamicTableMixin): return {k: getattr(self, k) for i, k in enumerate(self.categories)} def __getitem__( - self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]] ) -> pd.DataFrame: """ Mimic hdmf: @@ -581,25 +648,78 @@ class AlignedDynamicTableMixin(DynamicTableMixin): elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): # get a slice of a single table return self._categories[item[1]][item[0]] - elif isinstance(item, (int, slice)): + elif isinstance(item, (int, slice, Iterable)): # get a slice of all the tables ids = self.id[item] if not isinstance(ids, Iterable): ids = pd.Series([ids]) ids = pd.DataFrame({"id": ids}) - tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + tables = [ids] + for category_name, category in self._categories.items(): + table = category[item] + if isinstance(table, pd.DataFrame): + table = table.reset_index() + elif isinstance(table, np.ndarray): + table = pd.DataFrame({category_name: [table]}) + elif isinstance(table, Iterable): + table = pd.DataFrame({category_name: table}) + else: + raise ValueError( + f"Don't know how to construct category table for {category_name}" + ) + tables.append(table) + names = [self.name] + self.categories # construct below in case we need to support array indexing in the future else: raise ValueError( f"Dont know how to index with {item}, " - "need an int, string, slice, or tuple[int | slice, str]" + "need an int, string, slice, ndarray, or tuple[int | slice, str]" ) df = pd.concat(tables, axis=1, keys=names) df.set_index((self.name, "id"), drop=True, inplace=True) return df + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:], item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use the id column to determine length. 
+
+        If the id column doesn't represent length accurately, it's a bug
+        """
+        return len(self.id)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_id(cls, model: Dict[str, Any]) -> Dict:
+        """
+        Create ID column if not provided
+        """
+        if "id" not in model:
+            lengths = []
+            for key, val in model.items():
+                # don't get lengths of columns with an index
+                if (
+                    f"{key}_index" in model
+                    or (isinstance(val, VectorData) and val._index)
+                    or key in cls.NON_CATEGORY_FIELDS
+                ):
+                    continue
+                lengths.append(len(val))
+            model["id"] = np.arange(np.max(lengths))
+
+        return model
+
     @model_validator(mode="before")
     @classmethod
     def create_categories(cls, model: Dict[str, Any]) -> Dict:
@@ -626,6 +746,42 @@ class AlignedDynamicTableMixin(DynamicTableMixin):
             model["categories"].extend(categories)
         return model
 
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "AlignedDynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._categories.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+                        continue
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex) and (
+                        field_name == f"{key}_index" or field.target is col
+                    ):
+                        idx = field
+                        break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
+
+    @model_validator(mode="after")
+    def ensure_equal_length_cols(self) -> "AlignedDynamicTableMixin":
+        """
+        Ensure that all columns are equal length
+        """
+        lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+        assert all([length == lengths[0] for length in lengths]), (
+            "Columns are not of equal length! "
+            f"Got categories:\n{self.categories}\nand lengths: {lengths}"
+        )
+        return self
+
 
 linkml_meta = LinkMLMeta(
     {
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py
index f79660e..d37f163 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_4_0/hdmf_common_table.py
@@ -149,31 +149,62 @@ class VectorIndexMixin(BaseModel, Generic[T]):
         kwargs["value"] = value
         super().__init__(**kwargs)
 
-    def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
+    def _slice(self, arg: int) -> slice:
         """
         Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
         """
         start = 0 if arg == 0 else self.value[arg - 1]
         end = self.value[arg]
-        return self.target.value[slice(start, end)]
+        return slice(start, end)
 
     def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
         if self.target is None:
             return self.value[item]
         else:
             if isinstance(item, (int, np.integer)):
-                return self._getitem_helper(item)
+                return self.target.value[self._slice(item)]
             elif isinstance(item, (slice, Iterable)):
                 if isinstance(item, slice):
                     item = range(*item.indices(len(self.value)))
-                return [self._getitem_helper(i) for i in item]
-            else:
+                return [self.target.value[self._slice(i)] for i in item]
+            else:  # pragma: no cover
                 raise AttributeError(f"Could not index with {item}")
 
     def __setitem__(self, key: Union[int, slice], value: Any) -> None:
-        if self._index:
-            # VectorIndex is the thing that knows how to do the slicing
-            self._index[key] = value
+        """
+        Set a value on the :attr:`.target`.
+
+        .. note::
+
+            Even though we correct the indexing logic from HDMF where the
+            _data_ is the thing that is provided by the API when one accesses
+            table.data (rather than table.data_index as hdmf does),
+            we will set to the target here (rather than to the index)
+            to be consistent. To modify the index, modify `self.value` directly.
+
+        """
+        if self.target:
+            if isinstance(key, (int, np.integer)):
+                self.target.value[self._slice(key)] = value
+            elif isinstance(key, (slice, Iterable)):
+                if isinstance(key, slice):
+                    key = range(*key.indices(len(self.value)))
+
+                if isinstance(value, Iterable):
+                    if len(key) != len(value):
+                        raise ValueError(
+                            "Can only assign equal-length iterable to a slice, manually index the"
+                            " ragged values of the target VectorData object if you need more"
+                            " control"
+                        )
+                    for i, subval in zip(key, value):
+                        self.target.value[self._slice(i)] = subval
+                else:
+                    for i in key:
+                        self.target.value[self._slice(i)] = value
+            else:  # pragma: no cover
+                raise AttributeError(f"Could not index with {key}")
+
         else:
             self.value[key] = value
 
@@ -204,9 +235,17 @@ class DynamicTableRegionMixin(BaseModel):
 
     _index: Optional["VectorIndex"] = None
     table: "DynamicTableMixin"
-    value: Optional[NDArray] = None
+    value: Optional[NDArray[Shape["*"], int]] = None
 
-    def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+    @overload
+    def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+    def __getitem__(
+        self, item: Union[int, slice, Iterable]
+    ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
         """
         Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
         this being a subclass of ``VectorData``
@@ -221,20 +260,27 @@
                 # so we index table with an array to construct
                 # a list of lists of rows
                 return [self.table[idx] for idx in self._index[item]]
-            else:
+            else:  # pragma: no cover
                 raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
         else:
             if isinstance(item, (int, np.integer)):
                 return self.table[self.value[item]]
             elif isinstance(item, (slice, Iterable)):
+                # Return a list of dataframe rows because this is most often used
+                # as a column in a DynamicTable, so while it would normally be
+                # ideal to just return the slice as above as a single df,
+                # we need each row to be separate to fill the column
                 if isinstance(item, slice):
                     item = range(*item.indices(len(self.value)))
                 return [self.table[self.value[i]] for i in item]
-            else:
+            else:  # pragma: no cover
                 raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
 
     def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
-        self.table[self.value[key]] = value
+        # self.table[self.value[key]] = value
+        raise NotImplementedError(
+            "Assigning values to tables is not implemented yet!"
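+            # A hypothetical read-side counterpart, mirroring the commented-out
+            # assignment above (write support is still TODO):
+            #     row_df = self.table[self.value[key]]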
+ ) # pragma: no cover class DynamicTableMixin(BaseModel): @@ -245,9 +291,10 @@ class DynamicTableMixin(BaseModel): but simplifying along the way :) """ - model_config = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow", validate_assignment=True) __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "id", "name", "colnames", "description", @@ -261,10 +308,6 @@ class DynamicTableMixin(BaseModel): def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - @overload def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @@ -313,6 +356,7 @@ class DynamicTableMixin(BaseModel): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): data = self._slice_range(item) + index = self.id[item] elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -330,11 +374,15 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) + index = self.id[rows] else: raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return pd.DataFrame(data) + if not isinstance(index, Iterable): + index = [index] + index = pd.Index(data=index) + return pd.DataFrame(data, index=index) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -346,31 +394,40 @@ class DynamicTableMixin(BaseModel): data = {} for k in cols: if isinstance(rows, np.ndarray): + # help wanted - this is probably cr*zy slow val = [self._columns[k][i] for i in rows] else: val = self._columns[k][rows] # scalars need to be wrapped in series for pandas + # do this by the iterability of the rows index not the value because + # we want all lengths from this method to be equal, and if the rows are + # scalar, that means length == 1 if not isinstance(rows, (Iterable, slice)): - val = pd.Series([val]) + val = [val] data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") + raise NotImplementedError("TODO") # pragma: no cover def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): """ Add a column, appending it to ``colnames`` """ # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): + if not getattr(self, "__pydantic_complete__", False): # pragma: no cover return super().__setattr__(key, value) if key not in self.model_fields_set and not key.endswith("_index"): self.colnames.append(key) + # we get a recursion error if we setattr without having first added to + # extras if we need it to be there + if key not in self.model_fields and key not in self.__pydantic_extra__: + self.__pydantic_extra__[key] = value + return super().__setattr__(key, value) def __getattr__(self, item: str) -> Any: @@ -397,6 +454,8 @@ class DynamicTableMixin(BaseModel): """ Create ID column if not provided """ + if not isinstance(model, dict): + return model if "id" not in model: lengths = [] for key, val in model.items(): @@ -421,6 +480,8 @@ class DynamicTableMixin(BaseModel): the model dict is ordered after python3.6, so we can use that minus anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ + if not 
isinstance(model, dict): + return model if "colnames" not in model: colnames = [ k @@ -456,19 +517,21 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in model.items(): - if key in cls.model_fields: - continue - if not isinstance(val, (VectorData, VectorIndex)): - try: - if key.endswith("_index"): - model[key] = VectorIndex(name=key, description="", value=val) - else: - model[key] = VectorData(name=key, description="", value=val) - except ValidationError as e: - raise ValidationError( - f"field {key} cannot be cast to VectorData from {val}" - ) from e + + if isinstance(model, dict): + for key, val in model.items(): + if key in cls.model_fields: + continue + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + model[key] = VectorIndex(name=key, description="", value=val) + else: + model[key] = VectorData(name=key, description="", value=val) + except ValidationError as e: # pragma: no cover + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e return model @model_validator(mode="after") @@ -500,8 +563,8 @@ class DynamicTableMixin(BaseModel): """ Ensure that all columns are equal length """ - lengths = [len(v) for v in self._columns.values()] - assert [length == lengths[0] for length in lengths], ( + lengths = [len(v) for v in self._columns.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -537,15 +600,19 @@ class DynamicTableMixin(BaseModel): ) ) except Exception: - raise e + raise e from None -class AlignedDynamicTableMixin(DynamicTableMixin): +class AlignedDynamicTableMixin(BaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID + + A great deal of code duplication because we need to avoid diamond inheritance + and also it's not so easy to copy a pydantic validator method. 
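+
+    A hypothetical usage sketch (``MyAlignedTable`` and the two category
+    tables are illustrative stand-ins, not schema classes)::
+
+        aligned = MyAlignedTable(
+            name="aligned",
+            description="category tables sharing a common id",
+            spikes=spikes_table,
+            waveforms=waveforms_table,
+        )
+        df = aligned[0:2]  # one MultiIndex dataframe spanning both categories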
""" - __pydantic_extra__: Dict[str, "DynamicTableMixin"] + model_config = ConfigDict(extra="allow", validate_assignment=True) + __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]] NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( "name", @@ -563,7 +630,7 @@ class AlignedDynamicTableMixin(DynamicTableMixin): return {k: getattr(self, k) for i, k in enumerate(self.categories)} def __getitem__( - self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]] ) -> pd.DataFrame: """ Mimic hdmf: @@ -581,25 +648,78 @@ class AlignedDynamicTableMixin(DynamicTableMixin): elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): # get a slice of a single table return self._categories[item[1]][item[0]] - elif isinstance(item, (int, slice)): + elif isinstance(item, (int, slice, Iterable)): # get a slice of all the tables ids = self.id[item] if not isinstance(ids, Iterable): ids = pd.Series([ids]) ids = pd.DataFrame({"id": ids}) - tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + tables = [ids] + for category_name, category in self._categories.items(): + table = category[item] + if isinstance(table, pd.DataFrame): + table = table.reset_index() + elif isinstance(table, np.ndarray): + table = pd.DataFrame({category_name: [table]}) + elif isinstance(table, Iterable): + table = pd.DataFrame({category_name: table}) + else: + raise ValueError( + f"Don't know how to construct category table for {category_name}" + ) + tables.append(table) + names = [self.name] + self.categories # construct below in case we need to support array indexing in the future else: raise ValueError( f"Dont know how to index with {item}, " - "need an int, string, slice, or tuple[int | slice, str]" + "need an int, string, slice, ndarray, or tuple[int | slice, str]" ) df = pd.concat(tables, axis=1, keys=names) df.set_index((self.name, "id"), drop=True, inplace=True) return df + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:], item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use the id column to determine length. 
+ + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + + @model_validator(mode="before") + @classmethod + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_CATEGORY_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + @model_validator(mode="before") @classmethod def create_categories(cls, model: Dict[str, Any]) -> Dict: @@ -626,6 +746,42 @@ class AlignedDynamicTableMixin(DynamicTableMixin): model["categories"].extend(categories) return model + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._categories.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + if field_name in self.NON_CATEGORY_FIELDS or field_name == key: + continue + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._categories.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( + "Columns are not of equal length! " + f"Got colnames:\n{self.categories}\nand lengths: {lengths}" + ) + return self + linkml_meta = LinkMLMeta( { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index 6f6d0b0..c53154c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -149,31 +149,62 @@ class VectorIndexMixin(BaseModel, Generic[T]): kwargs["value"] = value super().__init__(**kwargs) - def _getitem_helper(self, arg: int) -> Union[list, NDArray]: + def _slice(self, arg: int) -> slice: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.value[slice(start, end)] + return slice(start, end) def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] else: if isinstance(item, (int, np.integer)): - return self._getitem_helper(item) + return self.target.value[self._slice(item)] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): item = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in item] - else: + return [self.target.value[self._slice(i)] for i in item] + else: # pragma: no cover raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value + """ + Set a value on the :attr:`.target` . + + .. 
note:: + + Even though we correct the indexing logic from HDMF where the + _data_ is the thing that is provided by the API when one accesses + table.data (rather than table.data_index as hdmf does), + we will set to the target here (rather than to the index) + to be consistent. To modify the index, modify `self.value` directly + + """ + if self.target: + if isinstance(key, (int, np.integer)): + self.target.value[self._slice(key)] = value + elif isinstance(key, (slice, Iterable)): + if isinstance(key, slice): + key = range(*key.indices(len(self.value))) + + if isinstance(value, Iterable): + if len(key) != len(value): + raise ValueError( + "Can only assign equal-length iterable to a slice, manually index the" + " ragged values of of the target VectorData object if you need more" + " control" + ) + for i, subval in zip(key, value): + self.target.value[self._slice(i)] = subval + else: + for i in key: + self.target.value[self._slice(i)] = value + else: # pragma: no cover + raise AttributeError(f"Could not index with {key}") + else: self.value[key] = value @@ -204,9 +235,17 @@ class DynamicTableRegionMixin(BaseModel): _index: Optional["VectorIndex"] = None table: "DynamicTableMixin" - value: Optional[NDArray] = None + value: Optional[NDArray[Shape["*"], int]] = None - def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + @overload + def __getitem__(self, item: int) -> pd.DataFrame: ... + + @overload + def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ... + + def __getitem__( + self, item: Union[int, slice, Iterable] + ) -> Union[pd.DataFrame, List[pd.DataFrame]]: """ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite this being a subclass of ``VectorData`` @@ -221,20 +260,27 @@ class DynamicTableRegionMixin(BaseModel): # so we index table with an array to construct # a list of lists of rows return [self.table[idx] for idx in self._index[item]] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): + # Return a list of dataframe rows because this is most often used + # as a column in a DynamicTable, so while it would normally be + # ideal to just return the slice as above as a single df, + # we need each row to be separate to fill the column if isinstance(item, slice): item = range(*item.indices(len(self.value))) return [self.table[self.value[i]] for i in item] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[self.value[key]] = value + # self.table[self.value[key]] = value + raise NotImplementedError( + "Assigning values to tables is not implemented yet!" 
+ ) # pragma: no cover class DynamicTableMixin(BaseModel): @@ -245,9 +291,10 @@ class DynamicTableMixin(BaseModel): but simplifying along the way :) """ - model_config = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow", validate_assignment=True) __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "id", "name", "colnames", "description", @@ -261,10 +308,6 @@ class DynamicTableMixin(BaseModel): def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - @overload def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @@ -313,6 +356,7 @@ class DynamicTableMixin(BaseModel): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): data = self._slice_range(item) + index = self.id[item] elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -330,11 +374,15 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) + index = self.id[rows] else: raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return pd.DataFrame(data) + if not isinstance(index, Iterable): + index = [index] + index = pd.Index(data=index) + return pd.DataFrame(data, index=index) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -346,31 +394,40 @@ class DynamicTableMixin(BaseModel): data = {} for k in cols: if isinstance(rows, np.ndarray): + # help wanted - this is probably cr*zy slow val = [self._columns[k][i] for i in rows] else: val = self._columns[k][rows] # scalars need to be wrapped in series for pandas + # do this by the iterability of the rows index not the value because + # we want all lengths from this method to be equal, and if the rows are + # scalar, that means length == 1 if not isinstance(rows, (Iterable, slice)): - val = pd.Series([val]) + val = [val] data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") + raise NotImplementedError("TODO") # pragma: no cover def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): """ Add a column, appending it to ``colnames`` """ # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): + if not getattr(self, "__pydantic_complete__", False): # pragma: no cover return super().__setattr__(key, value) if key not in self.model_fields_set and not key.endswith("_index"): self.colnames.append(key) + # we get a recursion error if we setattr without having first added to + # extras if we need it to be there + if key not in self.model_fields and key not in self.__pydantic_extra__: + self.__pydantic_extra__[key] = value + return super().__setattr__(key, value) def __getattr__(self, item: str) -> Any: @@ -397,6 +454,8 @@ class DynamicTableMixin(BaseModel): """ Create ID column if not provided """ + if not isinstance(model, dict): + return model if "id" not in model: lengths = [] for key, val in model.items(): @@ -421,6 +480,8 @@ class DynamicTableMixin(BaseModel): the model dict is ordered after python3.6, so we can use that minus anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ + if not 
isinstance(model, dict): + return model if "colnames" not in model: colnames = [ k @@ -456,19 +517,21 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in model.items(): - if key in cls.model_fields: - continue - if not isinstance(val, (VectorData, VectorIndex)): - try: - if key.endswith("_index"): - model[key] = VectorIndex(name=key, description="", value=val) - else: - model[key] = VectorData(name=key, description="", value=val) - except ValidationError as e: - raise ValidationError( - f"field {key} cannot be cast to VectorData from {val}" - ) from e + + if isinstance(model, dict): + for key, val in model.items(): + if key in cls.model_fields: + continue + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + model[key] = VectorIndex(name=key, description="", value=val) + else: + model[key] = VectorData(name=key, description="", value=val) + except ValidationError as e: # pragma: no cover + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e return model @model_validator(mode="after") @@ -500,8 +563,8 @@ class DynamicTableMixin(BaseModel): """ Ensure that all columns are equal length """ - lengths = [len(v) for v in self._columns.values()] - assert [length == lengths[0] for length in lengths], ( + lengths = [len(v) for v in self._columns.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -537,15 +600,19 @@ class DynamicTableMixin(BaseModel): ) ) except Exception: - raise e + raise e from None -class AlignedDynamicTableMixin(DynamicTableMixin): +class AlignedDynamicTableMixin(BaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID + + A great deal of code duplication because we need to avoid diamond inheritance + and also it's not so easy to copy a pydantic validator method. 
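+
+    Indexing mirrors :class:`DynamicTableMixin` where possible. A sketch,
+    assuming a hypothetical ``electrodes`` category::
+
+        aligned["electrodes"]       # the category table itself
+        aligned[0:2, "electrodes"]  # rows 0-1 of one category, as a DataFrame
+        aligned[0:2]                # rows 0-1 across all categories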
""" - __pydantic_extra__: Dict[str, "DynamicTableMixin"] + model_config = ConfigDict(extra="allow", validate_assignment=True) + __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]] NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( "name", @@ -563,7 +630,7 @@ class AlignedDynamicTableMixin(DynamicTableMixin): return {k: getattr(self, k) for i, k in enumerate(self.categories)} def __getitem__( - self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]] ) -> pd.DataFrame: """ Mimic hdmf: @@ -581,25 +648,78 @@ class AlignedDynamicTableMixin(DynamicTableMixin): elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): # get a slice of a single table return self._categories[item[1]][item[0]] - elif isinstance(item, (int, slice)): + elif isinstance(item, (int, slice, Iterable)): # get a slice of all the tables ids = self.id[item] if not isinstance(ids, Iterable): ids = pd.Series([ids]) ids = pd.DataFrame({"id": ids}) - tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + tables = [ids] + for category_name, category in self._categories.items(): + table = category[item] + if isinstance(table, pd.DataFrame): + table = table.reset_index() + elif isinstance(table, np.ndarray): + table = pd.DataFrame({category_name: [table]}) + elif isinstance(table, Iterable): + table = pd.DataFrame({category_name: table}) + else: + raise ValueError( + f"Don't know how to construct category table for {category_name}" + ) + tables.append(table) + names = [self.name] + self.categories # construct below in case we need to support array indexing in the future else: raise ValueError( f"Dont know how to index with {item}, " - "need an int, string, slice, or tuple[int | slice, str]" + "need an int, string, slice, ndarray, or tuple[int | slice, str]" ) df = pd.concat(tables, axis=1, keys=names) df.set_index((self.name, "id"), drop=True, inplace=True) return df + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:], item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use the id column to determine length. 
+ + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + + @model_validator(mode="before") + @classmethod + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_CATEGORY_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + @model_validator(mode="before") @classmethod def create_categories(cls, model: Dict[str, Any]) -> Dict: @@ -626,6 +746,42 @@ class AlignedDynamicTableMixin(DynamicTableMixin): model["categories"].extend(categories) return model + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._categories.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + if field_name in self.NON_CATEGORY_FIELDS or field_name == key: + continue + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._categories.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( + "Columns are not of equal length! " + f"Got colnames:\n{self.categories}\nand lengths: {lengths}" + ) + return self + linkml_meta = LinkMLMeta( { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py index 3590de9..39a4eb3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_1/hdmf_common_table.py @@ -149,31 +149,62 @@ class VectorIndexMixin(BaseModel, Generic[T]): kwargs["value"] = value super().__init__(**kwargs) - def _getitem_helper(self, arg: int) -> Union[list, NDArray]: + def _slice(self, arg: int) -> slice: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.value[slice(start, end)] + return slice(start, end) def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] else: if isinstance(item, (int, np.integer)): - return self._getitem_helper(item) + return self.target.value[self._slice(item)] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): item = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in item] - else: + return [self.target.value[self._slice(i)] for i in item] + else: # pragma: no cover raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value + """ + Set a value on the :attr:`.target` . + + .. 
note:: + + Even though we correct the indexing logic from HDMF where the + _data_ is the thing that is provided by the API when one accesses + table.data (rather than table.data_index as hdmf does), + we will set to the target here (rather than to the index) + to be consistent. To modify the index, modify `self.value` directly + + """ + if self.target: + if isinstance(key, (int, np.integer)): + self.target.value[self._slice(key)] = value + elif isinstance(key, (slice, Iterable)): + if isinstance(key, slice): + key = range(*key.indices(len(self.value))) + + if isinstance(value, Iterable): + if len(key) != len(value): + raise ValueError( + "Can only assign equal-length iterable to a slice, manually index the" + " ragged values of of the target VectorData object if you need more" + " control" + ) + for i, subval in zip(key, value): + self.target.value[self._slice(i)] = subval + else: + for i in key: + self.target.value[self._slice(i)] = value + else: # pragma: no cover + raise AttributeError(f"Could not index with {key}") + else: self.value[key] = value @@ -204,9 +235,17 @@ class DynamicTableRegionMixin(BaseModel): _index: Optional["VectorIndex"] = None table: "DynamicTableMixin" - value: Optional[NDArray] = None + value: Optional[NDArray[Shape["*"], int]] = None - def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + @overload + def __getitem__(self, item: int) -> pd.DataFrame: ... + + @overload + def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ... + + def __getitem__( + self, item: Union[int, slice, Iterable] + ) -> Union[pd.DataFrame, List[pd.DataFrame]]: """ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite this being a subclass of ``VectorData`` @@ -221,20 +260,27 @@ class DynamicTableRegionMixin(BaseModel): # so we index table with an array to construct # a list of lists of rows return [self.table[idx] for idx in self._index[item]] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): + # Return a list of dataframe rows because this is most often used + # as a column in a DynamicTable, so while it would normally be + # ideal to just return the slice as above as a single df, + # we need each row to be separate to fill the column if isinstance(item, slice): item = range(*item.indices(len(self.value))) return [self.table[self.value[i]] for i in item] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[self.value[key]] = value + # self.table[self.value[key]] = value + raise NotImplementedError( + "Assigning values to tables is not implemented yet!" 
+ ) # pragma: no cover class DynamicTableMixin(BaseModel): @@ -245,9 +291,10 @@ class DynamicTableMixin(BaseModel): but simplifying along the way :) """ - model_config = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow", validate_assignment=True) __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "id", "name", "colnames", "description", @@ -261,10 +308,6 @@ class DynamicTableMixin(BaseModel): def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - @overload def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @@ -313,6 +356,7 @@ class DynamicTableMixin(BaseModel): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): data = self._slice_range(item) + index = self.id[item] elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -330,11 +374,15 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) + index = self.id[rows] else: raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return pd.DataFrame(data) + if not isinstance(index, Iterable): + index = [index] + index = pd.Index(data=index) + return pd.DataFrame(data, index=index) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -346,31 +394,40 @@ class DynamicTableMixin(BaseModel): data = {} for k in cols: if isinstance(rows, np.ndarray): + # help wanted - this is probably cr*zy slow val = [self._columns[k][i] for i in rows] else: val = self._columns[k][rows] # scalars need to be wrapped in series for pandas + # do this by the iterability of the rows index not the value because + # we want all lengths from this method to be equal, and if the rows are + # scalar, that means length == 1 if not isinstance(rows, (Iterable, slice)): - val = pd.Series([val]) + val = [val] data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") + raise NotImplementedError("TODO") # pragma: no cover def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): """ Add a column, appending it to ``colnames`` """ # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): + if not getattr(self, "__pydantic_complete__", False): # pragma: no cover return super().__setattr__(key, value) if key not in self.model_fields_set and not key.endswith("_index"): self.colnames.append(key) + # we get a recursion error if we setattr without having first added to + # extras if we need it to be there + if key not in self.model_fields and key not in self.__pydantic_extra__: + self.__pydantic_extra__[key] = value + return super().__setattr__(key, value) def __getattr__(self, item: str) -> Any: @@ -397,6 +454,8 @@ class DynamicTableMixin(BaseModel): """ Create ID column if not provided """ + if not isinstance(model, dict): + return model if "id" not in model: lengths = [] for key, val in model.items(): @@ -421,6 +480,8 @@ class DynamicTableMixin(BaseModel): the model dict is ordered after python3.6, so we can use that minus anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ + if not 
isinstance(model, dict): + return model if "colnames" not in model: colnames = [ k @@ -456,19 +517,21 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in model.items(): - if key in cls.model_fields: - continue - if not isinstance(val, (VectorData, VectorIndex)): - try: - if key.endswith("_index"): - model[key] = VectorIndex(name=key, description="", value=val) - else: - model[key] = VectorData(name=key, description="", value=val) - except ValidationError as e: - raise ValidationError( - f"field {key} cannot be cast to VectorData from {val}" - ) from e + + if isinstance(model, dict): + for key, val in model.items(): + if key in cls.model_fields: + continue + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + model[key] = VectorIndex(name=key, description="", value=val) + else: + model[key] = VectorData(name=key, description="", value=val) + except ValidationError as e: # pragma: no cover + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e return model @model_validator(mode="after") @@ -500,8 +563,8 @@ class DynamicTableMixin(BaseModel): """ Ensure that all columns are equal length """ - lengths = [len(v) for v in self._columns.values()] - assert [length == lengths[0] for length in lengths], ( + lengths = [len(v) for v in self._columns.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -537,15 +600,19 @@ class DynamicTableMixin(BaseModel): ) ) except Exception: - raise e + raise e from None -class AlignedDynamicTableMixin(DynamicTableMixin): +class AlignedDynamicTableMixin(BaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID + + A great deal of code duplication because we need to avoid diamond inheritance + and also it's not so easy to copy a pydantic validator method. 
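+
+    Unknown attributes fall back to the combined DataFrame, so e.g.
+    ``aligned.shape`` resolves as ``aligned[:].shape`` (a sketch of the
+    :meth:`.__getattr__` fallback, not a dedicated field).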
""" - __pydantic_extra__: Dict[str, "DynamicTableMixin"] + model_config = ConfigDict(extra="allow", validate_assignment=True) + __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]] NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( "name", @@ -563,7 +630,7 @@ class AlignedDynamicTableMixin(DynamicTableMixin): return {k: getattr(self, k) for i, k in enumerate(self.categories)} def __getitem__( - self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]] ) -> pd.DataFrame: """ Mimic hdmf: @@ -581,25 +648,78 @@ class AlignedDynamicTableMixin(DynamicTableMixin): elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): # get a slice of a single table return self._categories[item[1]][item[0]] - elif isinstance(item, (int, slice)): + elif isinstance(item, (int, slice, Iterable)): # get a slice of all the tables ids = self.id[item] if not isinstance(ids, Iterable): ids = pd.Series([ids]) ids = pd.DataFrame({"id": ids}) - tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + tables = [ids] + for category_name, category in self._categories.items(): + table = category[item] + if isinstance(table, pd.DataFrame): + table = table.reset_index() + elif isinstance(table, np.ndarray): + table = pd.DataFrame({category_name: [table]}) + elif isinstance(table, Iterable): + table = pd.DataFrame({category_name: table}) + else: + raise ValueError( + f"Don't know how to construct category table for {category_name}" + ) + tables.append(table) + names = [self.name] + self.categories # construct below in case we need to support array indexing in the future else: raise ValueError( f"Dont know how to index with {item}, " - "need an int, string, slice, or tuple[int | slice, str]" + "need an int, string, slice, ndarray, or tuple[int | slice, str]" ) df = pd.concat(tables, axis=1, keys=names) df.set_index((self.name, "id"), drop=True, inplace=True) return df + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:], item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use the id column to determine length. 
+ + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + + @model_validator(mode="before") + @classmethod + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_CATEGORY_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + @model_validator(mode="before") @classmethod def create_categories(cls, model: Dict[str, Any]) -> Dict: @@ -626,6 +746,42 @@ class AlignedDynamicTableMixin(DynamicTableMixin): model["categories"].extend(categories) return model + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._categories.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + if field_name in self.NON_CATEGORY_FIELDS or field_name == key: + continue + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._categories.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( + "Columns are not of equal length! " + f"Got colnames:\n{self.categories}\nand lengths: {lengths}" + ) + return self + linkml_meta = LinkMLMeta( { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py index 07b20e3..da0bc73 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_6_0/hdmf_common_table.py @@ -149,31 +149,62 @@ class VectorIndexMixin(BaseModel, Generic[T]): kwargs["value"] = value super().__init__(**kwargs) - def _getitem_helper(self, arg: int) -> Union[list, NDArray]: + def _slice(self, arg: int) -> slice: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.value[slice(start, end)] + return slice(start, end) def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] else: if isinstance(item, (int, np.integer)): - return self._getitem_helper(item) + return self.target.value[self._slice(item)] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): item = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in item] - else: + return [self.target.value[self._slice(i)] for i in item] + else: # pragma: no cover raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value + """ + Set a value on the :attr:`.target` . + + .. 
note:: + + Even though we correct the indexing logic from HDMF where the + _data_ is the thing that is provided by the API when one accesses + table.data (rather than table.data_index as hdmf does), + we will set to the target here (rather than to the index) + to be consistent. To modify the index, modify `self.value` directly + + """ + if self.target: + if isinstance(key, (int, np.integer)): + self.target.value[self._slice(key)] = value + elif isinstance(key, (slice, Iterable)): + if isinstance(key, slice): + key = range(*key.indices(len(self.value))) + + if isinstance(value, Iterable): + if len(key) != len(value): + raise ValueError( + "Can only assign equal-length iterable to a slice, manually index the" + " ragged values of of the target VectorData object if you need more" + " control" + ) + for i, subval in zip(key, value): + self.target.value[self._slice(i)] = subval + else: + for i in key: + self.target.value[self._slice(i)] = value + else: # pragma: no cover + raise AttributeError(f"Could not index with {key}") + else: self.value[key] = value @@ -204,9 +235,17 @@ class DynamicTableRegionMixin(BaseModel): _index: Optional["VectorIndex"] = None table: "DynamicTableMixin" - value: Optional[NDArray] = None + value: Optional[NDArray[Shape["*"], int]] = None - def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + @overload + def __getitem__(self, item: int) -> pd.DataFrame: ... + + @overload + def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ... + + def __getitem__( + self, item: Union[int, slice, Iterable] + ) -> Union[pd.DataFrame, List[pd.DataFrame]]: """ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite this being a subclass of ``VectorData`` @@ -221,20 +260,27 @@ class DynamicTableRegionMixin(BaseModel): # so we index table with an array to construct # a list of lists of rows return [self.table[idx] for idx in self._index[item]] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): + # Return a list of dataframe rows because this is most often used + # as a column in a DynamicTable, so while it would normally be + # ideal to just return the slice as above as a single df, + # we need each row to be separate to fill the column if isinstance(item, slice): item = range(*item.indices(len(self.value))) return [self.table[self.value[i]] for i in item] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[self.value[key]] = value + # self.table[self.value[key]] = value + raise NotImplementedError( + "Assigning values to tables is not implemented yet!" 
+ ) # pragma: no cover class DynamicTableMixin(BaseModel): @@ -245,9 +291,10 @@ class DynamicTableMixin(BaseModel): but simplifying along the way :) """ - model_config = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow", validate_assignment=True) __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "id", "name", "colnames", "description", @@ -261,10 +308,6 @@ class DynamicTableMixin(BaseModel): def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - @overload def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @@ -313,6 +356,7 @@ class DynamicTableMixin(BaseModel): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): data = self._slice_range(item) + index = self.id[item] elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -330,11 +374,15 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) + index = self.id[rows] else: raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return pd.DataFrame(data) + if not isinstance(index, Iterable): + index = [index] + index = pd.Index(data=index) + return pd.DataFrame(data, index=index) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -346,31 +394,40 @@ class DynamicTableMixin(BaseModel): data = {} for k in cols: if isinstance(rows, np.ndarray): + # help wanted - this is probably cr*zy slow val = [self._columns[k][i] for i in rows] else: val = self._columns[k][rows] # scalars need to be wrapped in series for pandas + # do this by the iterability of the rows index not the value because + # we want all lengths from this method to be equal, and if the rows are + # scalar, that means length == 1 if not isinstance(rows, (Iterable, slice)): - val = pd.Series([val]) + val = [val] data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") + raise NotImplementedError("TODO") # pragma: no cover def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): """ Add a column, appending it to ``colnames`` """ # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): + if not getattr(self, "__pydantic_complete__", False): # pragma: no cover return super().__setattr__(key, value) if key not in self.model_fields_set and not key.endswith("_index"): self.colnames.append(key) + # we get a recursion error if we setattr without having first added to + # extras if we need it to be there + if key not in self.model_fields and key not in self.__pydantic_extra__: + self.__pydantic_extra__[key] = value + return super().__setattr__(key, value) def __getattr__(self, item: str) -> Any: @@ -397,6 +454,8 @@ class DynamicTableMixin(BaseModel): """ Create ID column if not provided """ + if not isinstance(model, dict): + return model if "id" not in model: lengths = [] for key, val in model.items(): @@ -421,6 +480,8 @@ class DynamicTableMixin(BaseModel): the model dict is ordered after python3.6, so we can use that minus anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ + if not 
isinstance(model, dict): + return model if "colnames" not in model: colnames = [ k @@ -456,19 +517,21 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in model.items(): - if key in cls.model_fields: - continue - if not isinstance(val, (VectorData, VectorIndex)): - try: - if key.endswith("_index"): - model[key] = VectorIndex(name=key, description="", value=val) - else: - model[key] = VectorData(name=key, description="", value=val) - except ValidationError as e: - raise ValidationError( - f"field {key} cannot be cast to VectorData from {val}" - ) from e + + if isinstance(model, dict): + for key, val in model.items(): + if key in cls.model_fields: + continue + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + model[key] = VectorIndex(name=key, description="", value=val) + else: + model[key] = VectorData(name=key, description="", value=val) + except ValidationError as e: # pragma: no cover + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e return model @model_validator(mode="after") @@ -500,8 +563,8 @@ class DynamicTableMixin(BaseModel): """ Ensure that all columns are equal length """ - lengths = [len(v) for v in self._columns.values()] - assert [length == lengths[0] for length in lengths], ( + lengths = [len(v) for v in self._columns.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -537,15 +600,19 @@ class DynamicTableMixin(BaseModel): ) ) except Exception: - raise e + raise e from None -class AlignedDynamicTableMixin(DynamicTableMixin): +class AlignedDynamicTableMixin(BaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID + + A great deal of code duplication because we need to avoid diamond inheritance + and also it's not so easy to copy a pydantic validator method. 
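+
+    Implicitly named indices are linked after validation, as in
+    DynamicTable. A sketch (hypothetical column names)::
+
+        aligned = AlignedDynamicTable(
+            name="t", description="...",
+            col=VectorData(name="col", description="", value=[1, 2, 3, 4]),
+            col_index=VectorIndex(name="col_index", description="", value=[2, 4]),
+        )
+        # resolve_targets() sets col._index = col_index
+        # and col_index.target = col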
""" - __pydantic_extra__: Dict[str, "DynamicTableMixin"] + model_config = ConfigDict(extra="allow", validate_assignment=True) + __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]] NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( "name", @@ -563,7 +630,7 @@ class AlignedDynamicTableMixin(DynamicTableMixin): return {k: getattr(self, k) for i, k in enumerate(self.categories)} def __getitem__( - self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]] ) -> pd.DataFrame: """ Mimic hdmf: @@ -581,25 +648,78 @@ class AlignedDynamicTableMixin(DynamicTableMixin): elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): # get a slice of a single table return self._categories[item[1]][item[0]] - elif isinstance(item, (int, slice)): + elif isinstance(item, (int, slice, Iterable)): # get a slice of all the tables ids = self.id[item] if not isinstance(ids, Iterable): ids = pd.Series([ids]) ids = pd.DataFrame({"id": ids}) - tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + tables = [ids] + for category_name, category in self._categories.items(): + table = category[item] + if isinstance(table, pd.DataFrame): + table = table.reset_index() + elif isinstance(table, np.ndarray): + table = pd.DataFrame({category_name: [table]}) + elif isinstance(table, Iterable): + table = pd.DataFrame({category_name: table}) + else: + raise ValueError( + f"Don't know how to construct category table for {category_name}" + ) + tables.append(table) + names = [self.name] + self.categories # construct below in case we need to support array indexing in the future else: raise ValueError( f"Dont know how to index with {item}, " - "need an int, string, slice, or tuple[int | slice, str]" + "need an int, string, slice, ndarray, or tuple[int | slice, str]" ) df = pd.concat(tables, axis=1, keys=names) df.set_index((self.name, "id"), drop=True, inplace=True) return df + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:], item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use the id column to determine length. 
+ + If the id column doesn't represent length accurately, it's a bug + """ + return len(self.id) + + @model_validator(mode="before") + @classmethod + def create_id(cls, model: Dict[str, Any]) -> Dict: + """ + Create ID column if not provided + """ + if "id" not in model: + lengths = [] + for key, val in model.items(): + # don't get lengths of columns with an index + if ( + f"{key}_index" in model + or (isinstance(val, VectorData) and val._index) + or key in cls.NON_CATEGORY_FIELDS + ): + continue + lengths.append(len(val)) + model["id"] = np.arange(np.max(lengths)) + + return model + @model_validator(mode="before") @classmethod def create_categories(cls, model: Dict[str, Any]) -> Dict: @@ -626,6 +746,42 @@ class AlignedDynamicTableMixin(DynamicTableMixin): model["categories"].extend(categories) return model + @model_validator(mode="after") + def resolve_targets(self) -> "DynamicTableMixin": + """ + Ensure that any implicitly indexed columns are linked, and create backlinks + """ + for key, col in self._categories.items(): + if isinstance(col, VectorData): + # find an index + idx = None + for field_name in self.model_fields_set: + if field_name in self.NON_CATEGORY_FIELDS or field_name == key: + continue + # implicit name-based index + field = getattr(self, field_name) + if isinstance(field, VectorIndex) and ( + field_name == f"{key}_index" or field.target is col + ): + idx = field + break + if idx is not None: + col._index = idx + idx.target = col + return self + + @model_validator(mode="after") + def ensure_equal_length_cols(self) -> "DynamicTableMixin": + """ + Ensure that all columns are equal length + """ + lengths = [len(v) for v in self._categories.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( + "Columns are not of equal length! " + f"Got colnames:\n{self.categories}\nand lengths: {lengths}" + ) + return self + linkml_meta = LinkMLMeta( { diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py index 5e7a82f..627f80d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_7_0/hdmf_common_table.py @@ -149,31 +149,62 @@ class VectorIndexMixin(BaseModel, Generic[T]): kwargs["value"] = value super().__init__(**kwargs) - def _getitem_helper(self, arg: int) -> Union[list, NDArray]: + def _slice(self, arg: int) -> slice: """ Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper` """ start = 0 if arg == 0 else self.value[arg - 1] end = self.value[arg] - return self.target.value[slice(start, end)] + return slice(start, end) def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: if self.target is None: return self.value[item] else: if isinstance(item, (int, np.integer)): - return self._getitem_helper(item) + return self.target.value[self._slice(item)] elif isinstance(item, (slice, Iterable)): if isinstance(item, slice): item = range(*item.indices(len(self.value))) - return [self._getitem_helper(i) for i in item] - else: + return [self.target.value[self._slice(i)] for i in item] + else: # pragma: no cover raise AttributeError(f"Could not index with {item}") def __setitem__(self, key: Union[int, slice], value: Any) -> None: - if self._index: - # VectorIndex is the thing that knows how to do the slicing - self._index[key] = value + """ + Set a value on the :attr:`.target` . + + .. 
note:: + + Even though we correct the indexing logic from HDMF where the + _data_ is the thing that is provided by the API when one accesses + table.data (rather than table.data_index as hdmf does), + we will set to the target here (rather than to the index) + to be consistent. To modify the index, modify `self.value` directly + + """ + if self.target: + if isinstance(key, (int, np.integer)): + self.target.value[self._slice(key)] = value + elif isinstance(key, (slice, Iterable)): + if isinstance(key, slice): + key = range(*key.indices(len(self.value))) + + if isinstance(value, Iterable): + if len(key) != len(value): + raise ValueError( + "Can only assign equal-length iterable to a slice, manually index the" + " ragged values of of the target VectorData object if you need more" + " control" + ) + for i, subval in zip(key, value): + self.target.value[self._slice(i)] = subval + else: + for i in key: + self.target.value[self._slice(i)] = value + else: # pragma: no cover + raise AttributeError(f"Could not index with {key}") + else: self.value[key] = value @@ -204,9 +235,17 @@ class DynamicTableRegionMixin(BaseModel): _index: Optional["VectorIndex"] = None table: "DynamicTableMixin" - value: Optional[NDArray] = None + value: Optional[NDArray[Shape["*"], int]] = None - def __getitem__(self, item: Union[int, slice, Iterable]) -> Any: + @overload + def __getitem__(self, item: int) -> pd.DataFrame: ... + + @overload + def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ... + + def __getitem__( + self, item: Union[int, slice, Iterable] + ) -> Union[pd.DataFrame, List[pd.DataFrame]]: """ Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite this being a subclass of ``VectorData`` @@ -221,20 +260,27 @@ class DynamicTableRegionMixin(BaseModel): # so we index table with an array to construct # a list of lists of rows return [self.table[idx] for idx in self._index[item]] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") else: if isinstance(item, (int, np.integer)): return self.table[self.value[item]] elif isinstance(item, (slice, Iterable)): + # Return a list of dataframe rows because this is most often used + # as a column in a DynamicTable, so while it would normally be + # ideal to just return the slice as above as a single df, + # we need each row to be separate to fill the column if isinstance(item, slice): item = range(*item.indices(len(self.value))) return [self.table[self.value[i]] for i in item] - else: + else: # pragma: no cover raise ValueError(f"Dont know how to index with {item}, need an int or a slice") def __setitem__(self, key: Union[int, str, slice], value: Any) -> None: - self.table[self.value[key]] = value + # self.table[self.value[key]] = value + raise NotImplementedError( + "Assigning values to tables is not implemented yet!" 
+ ) # pragma: no cover class DynamicTableMixin(BaseModel): @@ -245,9 +291,10 @@ class DynamicTableMixin(BaseModel): but simplifying along the way :) """ - model_config = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow", validate_assignment=True) __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "id", "name", "colnames", "description", @@ -261,10 +308,6 @@ class DynamicTableMixin(BaseModel): def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - @overload def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @@ -313,6 +356,7 @@ class DynamicTableMixin(BaseModel): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): data = self._slice_range(item) + index = self.id[item] elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -330,11 +374,15 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) + index = self.id[rows] else: raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return pd.DataFrame(data) + if not isinstance(index, Iterable): + index = [index] + index = pd.Index(data=index) + return pd.DataFrame(data, index=index) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -346,31 +394,40 @@ class DynamicTableMixin(BaseModel): data = {} for k in cols: if isinstance(rows, np.ndarray): + # help wanted - this is probably cr*zy slow val = [self._columns[k][i] for i in rows] else: val = self._columns[k][rows] # scalars need to be wrapped in series for pandas + # do this by the iterability of the rows index not the value because + # we want all lengths from this method to be equal, and if the rows are + # scalar, that means length == 1 if not isinstance(rows, (Iterable, slice)): - val = pd.Series([val]) + val = [val] data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") + raise NotImplementedError("TODO") # pragma: no cover def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): """ Add a column, appending it to ``colnames`` """ # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): + if not getattr(self, "__pydantic_complete__", False): # pragma: no cover return super().__setattr__(key, value) if key not in self.model_fields_set and not key.endswith("_index"): self.colnames.append(key) + # we get a recursion error if we setattr without having first added to + # extras if we need it to be there + if key not in self.model_fields and key not in self.__pydantic_extra__: + self.__pydantic_extra__[key] = value + return super().__setattr__(key, value) def __getattr__(self, item: str) -> Any: @@ -397,6 +454,8 @@ class DynamicTableMixin(BaseModel): """ Create ID column if not provided """ + if not isinstance(model, dict): + return model if "id" not in model: lengths = [] for key, val in model.items(): @@ -421,6 +480,8 @@ class DynamicTableMixin(BaseModel): the model dict is ordered after python3.6, so we can use that minus anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ + if not 
isinstance(model, dict): + return model if "colnames" not in model: colnames = [ k @@ -456,19 +517,21 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in model.items(): - if key in cls.model_fields: - continue - if not isinstance(val, (VectorData, VectorIndex)): - try: - if key.endswith("_index"): - model[key] = VectorIndex(name=key, description="", value=val) - else: - model[key] = VectorData(name=key, description="", value=val) - except ValidationError as e: - raise ValidationError( - f"field {key} cannot be cast to VectorData from {val}" - ) from e + + if isinstance(model, dict): + for key, val in model.items(): + if key in cls.model_fields: + continue + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + model[key] = VectorIndex(name=key, description="", value=val) + else: + model[key] = VectorData(name=key, description="", value=val) + except ValidationError as e: # pragma: no cover + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e return model @model_validator(mode="after") @@ -500,8 +563,8 @@ class DynamicTableMixin(BaseModel): """ Ensure that all columns are equal length """ - lengths = [len(v) for v in self._columns.values()] - assert [length == lengths[0] for length in lengths], ( + lengths = [len(v) for v in self._columns.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -537,15 +600,19 @@ class DynamicTableMixin(BaseModel): ) ) except Exception: - raise e + raise e from None -class AlignedDynamicTableMixin(DynamicTableMixin): +class AlignedDynamicTableMixin(BaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID + + A great deal of code duplication because we need to avoid diamond inheritance + and also it's not so easy to copy a pydantic validator method. 
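+
+    ``categories`` is inferred from construction order when not given
+    explicitly (a sketch, with hypothetical tables ``a`` and ``b``)::
+
+        aligned = AlignedDynamicTable(name="t", description="...", a=a, b=b)
+        aligned.categories  # ["a", "b"], per create_categories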
""" - __pydantic_extra__: Dict[str, "DynamicTableMixin"] + model_config = ConfigDict(extra="allow", validate_assignment=True) + __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]] NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( "name", @@ -563,7 +630,7 @@ class AlignedDynamicTableMixin(DynamicTableMixin): return {k: getattr(self, k) for i, k in enumerate(self.categories)} def __getitem__( - self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]] ) -> pd.DataFrame: """ Mimic hdmf: @@ -581,25 +648,78 @@ class AlignedDynamicTableMixin(DynamicTableMixin): elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): # get a slice of a single table return self._categories[item[1]][item[0]] - elif isinstance(item, (int, slice)): + elif isinstance(item, (int, slice, Iterable)): # get a slice of all the tables ids = self.id[item] if not isinstance(ids, Iterable): ids = pd.Series([ids]) ids = pd.DataFrame({"id": ids}) - tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + tables = [ids] + for category_name, category in self._categories.items(): + table = category[item] + if isinstance(table, pd.DataFrame): + table = table.reset_index() + elif isinstance(table, np.ndarray): + table = pd.DataFrame({category_name: [table]}) + elif isinstance(table, Iterable): + table = pd.DataFrame({category_name: table}) + else: + raise ValueError( + f"Don't know how to construct category table for {category_name}" + ) + tables.append(table) + names = [self.name] + self.categories # construct below in case we need to support array indexing in the future else: raise ValueError( f"Dont know how to index with {item}, " - "need an int, string, slice, or tuple[int | slice, str]" + "need an int, string, slice, ndarray, or tuple[int | slice, str]" ) df = pd.concat(tables, axis=1, keys=names) df.set_index((self.name, "id"), drop=True, inplace=True) return df + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:], item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use the id column to determine length. 
+
+        If the id column doesn't represent length accurately, it's a bug
+        """
+        return len(self.id)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_id(cls, model: Dict[str, Any]) -> Dict:
+        """
+        Create ID column if not provided
+        """
+        if "id" not in model:
+            lengths = []
+            for key, val in model.items():
+                # don't get lengths of columns with an index
+                if (
+                    f"{key}_index" in model
+                    or (isinstance(val, VectorData) and val._index)
+                    or key in cls.NON_CATEGORY_FIELDS
+                ):
+                    continue
+                lengths.append(len(val))
+            model["id"] = np.arange(np.max(lengths))
+
+        return model
+
     @model_validator(mode="before")
     @classmethod
     def create_categories(cls, model: Dict[str, Any]) -> Dict:
@@ -626,6 +746,42 @@ class AlignedDynamicTableMixin(DynamicTableMixin):
             model["categories"].extend(categories)
         return model
 
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "AlignedDynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._categories.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+                        continue
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex) and (
+                        field_name == f"{key}_index" or field.target is col
+                    ):
+                        idx = field
+                        break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
+
+    @model_validator(mode="after")
+    def ensure_equal_length_cols(self) -> "AlignedDynamicTableMixin":
+        """
+        Ensure that all columns are equal length
+        """
+        lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+        assert all([length == lengths[0] for length in lengths]), (
+            "Columns are not of equal length! "
+            f"Got categories:\n{self.categories}\nand lengths: {lengths}"
+        )
+        return self
+
 
 linkml_meta = LinkMLMeta(
     {
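The `_getitem_helper` to `_slice` refactor in this patch keeps the `VectorIndex` cumulative-offset arithmetic in one place: row `i` of an indexed column spans `value[i-1]:value[i]` of the target `VectorData`, with an implicit start of `0` for the first row. A minimal numpy-only sketch of that lookup, with illustrative names that are not part of the nwb_linkml API:

```python
# Sketch of the ragged-row lookup that VectorIndexMixin._slice implements:
# the index stores cumulative *end* offsets into the flat target data.
import numpy as np

target = np.arange(10)        # flat ragged data, stand-in for VectorData.value
index = np.array([3, 5, 10])  # cumulative end offsets -> rows of length 3, 2, 5

def row_slice(i: int) -> slice:
    # row 0 starts at 0; every other row starts where the previous one ended
    start = 0 if i == 0 else index[i - 1]
    return slice(start, index[i])

assert list(target[row_slice(0)]) == [0, 1, 2]
assert list(target[row_slice(1)]) == [3, 4]
assert list(target[row_slice(2)]) == [5, 6, 7, 8, 9]
```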
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py
index 7a6660a..5a29869 100644
--- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py
+++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py
@@ -149,31 +149,62 @@ class VectorIndexMixin(BaseModel, Generic[T]):
             kwargs["value"] = value
         super().__init__(**kwargs)
 
-    def _getitem_helper(self, arg: int) -> Union[list, NDArray]:
+    def _slice(self, arg: int) -> slice:
         """
         Mimicking :func:`hdmf.common.table.VectorIndex.__getitem_helper`
         """
         start = 0 if arg == 0 else self.value[arg - 1]
         end = self.value[arg]
-        return self.target.value[slice(start, end)]
+        return slice(start, end)
 
     def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
         if self.target is None:
             return self.value[item]
         else:
             if isinstance(item, (int, np.integer)):
-                return self._getitem_helper(item)
+                return self.target.value[self._slice(item)]
             elif isinstance(item, (slice, Iterable)):
                 if isinstance(item, slice):
                     item = range(*item.indices(len(self.value)))
-                return [self._getitem_helper(i) for i in item]
-            else:
+                return [self.target.value[self._slice(i)] for i in item]
+            else:  # pragma: no cover
                 raise AttributeError(f"Could not index with {item}")
 
     def __setitem__(self, key: Union[int, slice], value: Any) -> None:
-        if self._index:
-            # VectorIndex is the thing that knows how to do the slicing
-            self._index[key] = value
+        """
+        Set a value on the :attr:`.target`.
+
+        .. note::
+
+            Even though we correct the indexing logic from HDMF so that the
+            _data_ is what the API provides when one accesses table.data
+            (rather than table.data_index, as hdmf does), we still set values
+            on the target here (rather than on the index) to be consistent.
+            To modify the index, modify `self.value` directly.
+
+        """
+        if self.target:
+            if isinstance(key, (int, np.integer)):
+                self.target.value[self._slice(key)] = value
+            elif isinstance(key, (slice, Iterable)):
+                if isinstance(key, slice):
+                    key = range(*key.indices(len(self.value)))
+
+                if isinstance(value, Iterable):
+                    if len(key) != len(value):
+                        raise ValueError(
+                            "Can only assign equal-length iterable to a slice, manually index the"
+                            " ragged values of the target VectorData object if you need more"
+                            " control"
+                        )
+                    for i, subval in zip(key, value):
+                        self.target.value[self._slice(i)] = subval
+                else:
+                    for i in key:
+                        self.target.value[self._slice(i)] = value
+            else:  # pragma: no cover
+                raise AttributeError(f"Could not index with {key}")
+
         else:
             self.value[key] = value
 
@@ -204,9 +235,17 @@ class DynamicTableRegionMixin(BaseModel):
     _index: Optional["VectorIndex"] = None
 
     table: "DynamicTableMixin"
-    value: Optional[NDArray] = None
+    value: Optional[NDArray[Shape["*"], int]] = None
 
-    def __getitem__(self, item: Union[int, slice, Iterable]) -> Any:
+    @overload
+    def __getitem__(self, item: int) -> pd.DataFrame: ...
+
+    @overload
+    def __getitem__(self, item: Union[slice, Iterable]) -> List[pd.DataFrame]: ...
+
+    def __getitem__(
+        self, item: Union[int, slice, Iterable]
+    ) -> Union[pd.DataFrame, List[pd.DataFrame]]:
         """
         Use ``value`` to index the table. Works analogously to ``VectorIndex`` despite
         this being a subclass of ``VectorData``
@@ -221,20 +260,27 @@ class DynamicTableRegionMixin(BaseModel):
                 # so we index table with an array to construct
                 # a list of lists of rows
                 return [self.table[idx] for idx in self._index[item]]
-            else:
+            else:  # pragma: no cover
                 raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
         else:
             if isinstance(item, (int, np.integer)):
                 return self.table[self.value[item]]
             elif isinstance(item, (slice, Iterable)):
+                # Return a list of dataframe rows because this is most often used
+                # as a column in a DynamicTable, so while it would normally be
+                # ideal to just return the slice as above as a single df,
+                # we need each row to be separate to fill the column
                 if isinstance(item, slice):
                     item = range(*item.indices(len(self.value)))
                 return [self.table[self.value[i]] for i in item]
-            else:
+            else:  # pragma: no cover
                 raise ValueError(f"Dont know how to index with {item}, need an int or a slice")
 
     def __setitem__(self, key: Union[int, str, slice], value: Any) -> None:
-        self.table[self.value[key]] = value
+        # self.table[self.value[key]] = value
+        raise NotImplementedError(
+            "Assigning values to tables is not implemented yet!"
+ ) # pragma: no cover class DynamicTableMixin(BaseModel): @@ -245,9 +291,10 @@ class DynamicTableMixin(BaseModel): but simplifying along the way :) """ - model_config = ConfigDict(extra="allow") + model_config = ConfigDict(extra="allow", validate_assignment=True) __pydantic_extra__: Dict[str, Union["VectorDataMixin", "VectorIndexMixin", "NDArray", list]] NON_COLUMN_FIELDS: ClassVar[tuple[str]] = ( + "id", "name", "colnames", "description", @@ -261,10 +308,6 @@ class DynamicTableMixin(BaseModel): def _columns(self) -> Dict[str, Union[list, "NDArray", "VectorDataMixin"]]: return {k: getattr(self, k) for i, k in enumerate(self.colnames)} - @property - def _columns_list(self) -> List[Union[list, "NDArray", "VectorDataMixin"]]: - return [getattr(self, k) for i, k in enumerate(self.colnames)] - @overload def __getitem__(self, item: str) -> Union[list, "NDArray", "VectorDataMixin"]: ... @@ -313,6 +356,7 @@ class DynamicTableMixin(BaseModel): return self._columns[item] if isinstance(item, (int, slice, np.integer, np.ndarray)): data = self._slice_range(item) + index = self.id[item] elif isinstance(item, tuple): if len(item) != 2: raise ValueError( @@ -330,11 +374,15 @@ class DynamicTableMixin(BaseModel): return self._columns[cols][rows] data = self._slice_range(rows, cols) + index = self.id[rows] else: raise ValueError(f"Unsure how to get item with key {item}") # cast to DF - return pd.DataFrame(data) + if not isinstance(index, Iterable): + index = [index] + index = pd.Index(data=index) + return pd.DataFrame(data, index=index) def _slice_range( self, rows: Union[int, slice, np.ndarray], cols: Optional[Union[str, List[str]]] = None @@ -346,31 +394,40 @@ class DynamicTableMixin(BaseModel): data = {} for k in cols: if isinstance(rows, np.ndarray): + # help wanted - this is probably cr*zy slow val = [self._columns[k][i] for i in rows] else: val = self._columns[k][rows] # scalars need to be wrapped in series for pandas + # do this by the iterability of the rows index not the value because + # we want all lengths from this method to be equal, and if the rows are + # scalar, that means length == 1 if not isinstance(rows, (Iterable, slice)): - val = pd.Series([val]) + val = [val] data[k] = val return data def __setitem__(self, key: str, value: Any) -> None: - raise NotImplementedError("TODO") + raise NotImplementedError("TODO") # pragma: no cover def __setattr__(self, key: str, value: Union[list, "NDArray", "VectorData"]): """ Add a column, appending it to ``colnames`` """ # don't use this while building the model - if not getattr(self, "__pydantic_complete__", False): + if not getattr(self, "__pydantic_complete__", False): # pragma: no cover return super().__setattr__(key, value) if key not in self.model_fields_set and not key.endswith("_index"): self.colnames.append(key) + # we get a recursion error if we setattr without having first added to + # extras if we need it to be there + if key not in self.model_fields and key not in self.__pydantic_extra__: + self.__pydantic_extra__[key] = value + return super().__setattr__(key, value) def __getattr__(self, item: str) -> Any: @@ -397,6 +454,8 @@ class DynamicTableMixin(BaseModel): """ Create ID column if not provided """ + if not isinstance(model, dict): + return model if "id" not in model: lengths = [] for key, val in model.items(): @@ -421,6 +480,8 @@ class DynamicTableMixin(BaseModel): the model dict is ordered after python3.6, so we can use that minus anything in :attr:`.NON_COLUMN_FIELDS` to determine order implied from passage order """ + if not 
isinstance(model, dict): + return model if "colnames" not in model: colnames = [ k @@ -456,19 +517,21 @@ class DynamicTableMixin(BaseModel): See :meth:`.cast_specified_columns` for handling columns in the class specification """ # if columns are not in the specification, cast to a generic VectorData - for key, val in model.items(): - if key in cls.model_fields: - continue - if not isinstance(val, (VectorData, VectorIndex)): - try: - if key.endswith("_index"): - model[key] = VectorIndex(name=key, description="", value=val) - else: - model[key] = VectorData(name=key, description="", value=val) - except ValidationError as e: - raise ValidationError( - f"field {key} cannot be cast to VectorData from {val}" - ) from e + + if isinstance(model, dict): + for key, val in model.items(): + if key in cls.model_fields: + continue + if not isinstance(val, (VectorData, VectorIndex)): + try: + if key.endswith("_index"): + model[key] = VectorIndex(name=key, description="", value=val) + else: + model[key] = VectorData(name=key, description="", value=val) + except ValidationError as e: # pragma: no cover + raise ValidationError( + f"field {key} cannot be cast to VectorData from {val}" + ) from e return model @model_validator(mode="after") @@ -500,8 +563,8 @@ class DynamicTableMixin(BaseModel): """ Ensure that all columns are equal length """ - lengths = [len(v) for v in self._columns.values()] - assert [length == lengths[0] for length in lengths], ( + lengths = [len(v) for v in self._columns.values()] + [len(self.id)] + assert all([length == lengths[0] for length in lengths]), ( "Columns are not of equal length! " f"Got colnames:\n{self.colnames}\nand lengths: {lengths}" ) @@ -537,15 +600,19 @@ class DynamicTableMixin(BaseModel): ) ) except Exception: - raise e + raise e from None -class AlignedDynamicTableMixin(DynamicTableMixin): +class AlignedDynamicTableMixin(BaseModel): """ Mixin to allow indexing multiple tables that are aligned on a common ID + + A great deal of code duplication because we need to avoid diamond inheritance + and also it's not so easy to copy a pydantic validator method. 
""" - __pydantic_extra__: Dict[str, "DynamicTableMixin"] + model_config = ConfigDict(extra="allow", validate_assignment=True) + __pydantic_extra__: Dict[str, Union["DynamicTableMixin", "VectorDataMixin", "VectorIndexMixin"]] NON_CATEGORY_FIELDS: ClassVar[tuple[str]] = ( "name", @@ -563,7 +630,7 @@ class AlignedDynamicTableMixin(DynamicTableMixin): return {k: getattr(self, k) for i, k in enumerate(self.categories)} def __getitem__( - self, item: Union[int, str, slice, Tuple[Union[int, slice], str]] + self, item: Union[int, str, slice, NDArray[Shape["*"], int], Tuple[Union[int, slice], str]] ) -> pd.DataFrame: """ Mimic hdmf: @@ -581,25 +648,78 @@ class AlignedDynamicTableMixin(DynamicTableMixin): elif isinstance(item, tuple) and len(item) == 2 and isinstance(item[1], str): # get a slice of a single table return self._categories[item[1]][item[0]] - elif isinstance(item, (int, slice)): + elif isinstance(item, (int, slice, Iterable)): # get a slice of all the tables ids = self.id[item] if not isinstance(ids, Iterable): ids = pd.Series([ids]) ids = pd.DataFrame({"id": ids}) - tables = [ids] + [table[item].reset_index() for table in self._categories.values()] + tables = [ids] + for category_name, category in self._categories.items(): + table = category[item] + if isinstance(table, pd.DataFrame): + table = table.reset_index() + elif isinstance(table, np.ndarray): + table = pd.DataFrame({category_name: [table]}) + elif isinstance(table, Iterable): + table = pd.DataFrame({category_name: table}) + else: + raise ValueError( + f"Don't know how to construct category table for {category_name}" + ) + tables.append(table) + names = [self.name] + self.categories # construct below in case we need to support array indexing in the future else: raise ValueError( f"Dont know how to index with {item}, " - "need an int, string, slice, or tuple[int | slice, str]" + "need an int, string, slice, ndarray, or tuple[int | slice, str]" ) df = pd.concat(tables, axis=1, keys=names) df.set_index((self.name, "id"), drop=True, inplace=True) return df + def __getattr__(self, item: str) -> Any: + """Try and use pandas df attrs if we don't have them""" + try: + return BaseModel.__getattr__(self, item) + except AttributeError as e: + try: + return getattr(self[:], item) + except AttributeError: + raise e from None + + def __len__(self) -> int: + """ + Use the id column to determine length. 
+
+        If the id column doesn't represent length accurately, it's a bug
+        """
+        return len(self.id)
+
+    @model_validator(mode="before")
+    @classmethod
+    def create_id(cls, model: Dict[str, Any]) -> Dict:
+        """
+        Create ID column if not provided
+        """
+        if "id" not in model:
+            lengths = []
+            for key, val in model.items():
+                # don't get lengths of columns with an index
+                if (
+                    f"{key}_index" in model
+                    or (isinstance(val, VectorData) and val._index)
+                    or key in cls.NON_CATEGORY_FIELDS
+                ):
+                    continue
+                lengths.append(len(val))
+            model["id"] = np.arange(np.max(lengths))
+
+        return model
+
     @model_validator(mode="before")
     @classmethod
     def create_categories(cls, model: Dict[str, Any]) -> Dict:
@@ -626,6 +746,42 @@ class AlignedDynamicTableMixin(DynamicTableMixin):
             model["categories"].extend(categories)
         return model
 
+    @model_validator(mode="after")
+    def resolve_targets(self) -> "AlignedDynamicTableMixin":
+        """
+        Ensure that any implicitly indexed columns are linked, and create backlinks
+        """
+        for key, col in self._categories.items():
+            if isinstance(col, VectorData):
+                # find an index
+                idx = None
+                for field_name in self.model_fields_set:
+                    if field_name in self.NON_CATEGORY_FIELDS or field_name == key:
+                        continue
+                    # implicit name-based index
+                    field = getattr(self, field_name)
+                    if isinstance(field, VectorIndex) and (
+                        field_name == f"{key}_index" or field.target is col
+                    ):
+                        idx = field
+                        break
+                if idx is not None:
+                    col._index = idx
+                    idx.target = col
+        return self
+
+    @model_validator(mode="after")
+    def ensure_equal_length_cols(self) -> "AlignedDynamicTableMixin":
+        """
+        Ensure that all columns are equal length
+        """
+        lengths = [len(v) for v in self._categories.values()] + [len(self.id)]
+        assert all([length == lengths[0] for length in lengths]), (
+            "Columns are not of equal length! "
+            f"Got categories:\n{self.categories}\nand lengths: {lengths}"
+        )
+        return self
+
 
 linkml_meta = LinkMLMeta(
     {
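This patch changes row indexing on `DynamicTableMixin` so that every selection comes back as a `DataFrame` indexed by the table's `id` column, with scalar row selections wrapped to length-1 columns so all columns in the result stay equal length. A self-contained sketch of that contract using plain pandas and numpy, with made-up table data and helper names that are not part of the nwb_linkml API:

```python
# Sketch of the id-indexed row selection that DynamicTableMixin.__getitem__
# and _slice_range implement above, outside the pydantic machinery.
import numpy as np
import pandas as pd

columns = {"x": np.arange(5), "y": np.arange(5) * 2.0}
ids = np.arange(5)

def get_rows(rows):
    # slice every column with the same row selector
    data = {k: v[rows] for k, v in columns.items()}
    index = ids[rows]
    if not np.iterable(index):
        # scalar rows are wrapped so every column has length 1, keyed on
        # the iterability of the rows index rather than of the values
        index = [index]
        data = {k: [v] for k, v in data.items()}
    return pd.DataFrame(data, index=pd.Index(index))

print(get_rows(2))            # a single row, still a 1-row DataFrame
print(get_rows(slice(1, 4)))  # a range of rows, indexed by id
```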
" + f"Got colnames:\n{self.categories}\nand lengths: {lengths}" + ) + return self + linkml_meta = LinkMLMeta( { From 24494b8ee4f7b4f35a9240c5aca7a517bfb8f026 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 15 Aug 2024 01:43:42 -0700 Subject: [PATCH 58/61] reference vector series tests, fix model tests --- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 42 +++++++++++++------- nwb_linkml/tests/test_includes/test_hdmf.py | 43 +++++++++++++++++---- 2 files changed, 62 insertions(+), 23 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 0c4e7ce..7fa3c08 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -748,9 +748,9 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): for index, span, and timeseries """ - idx_start: NDArray[Any, int] - count: NDArray[Any, int] - timeseries: NDArray[Any, BaseModel] + idx_start: NDArray[Shape["*"], int] + count: NDArray[Shape["*"], int] + timeseries: NDArray @model_validator(mode="after") def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin": @@ -789,11 +789,11 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): ) if isinstance(item, (int, np.integer)): - return self.timeseries[self._slice_helper(item)] - elif isinstance(item, slice): - return [self.timeseries[subitem] for subitem in self._slice_helper(item)] - elif isinstance(item, Iterable): - return [self.timeseries[self._slice_helper(subitem)] for subitem in item] + return self.timeseries[item][self._slice_helper(item)] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.idx_start))) + return [self.timeseries[subitem][self._slice_helper(subitem)] for subitem in item] else: raise ValueError( f"Dont know how to index with {item}, must be an int, slice, or iterable" @@ -806,13 +806,22 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): " never done in the core schema." 
) if isinstance(key, (int, np.integer)): - self.timeseries[self._slice_helper(key)] = value - elif isinstance(key, slice): - for subitem in self._slice_helper(key): - self.timeseries[subitem] = value - elif isinstance(key, Iterable): - for subitem in key: - self.timeseries[self._slice_helper(subitem)] = value + self.timeseries[key][self._slice_helper(key)] = value + elif isinstance(key, (slice, Iterable)): + if isinstance(key, slice): + key = range(*key.indices(len(self.idx_start))) + + if isinstance(value, Iterable): + if len(key) != len(value): + raise ValueError( + "Can only assign equal-length iterable to a slice, manually index the" + " target Timeseries object if you need more control" + ) + for subitem, subvalue in zip(key, value): + self.timeseries[subitem][self._slice_helper(subitem)] = subvalue + else: + for subitem in key: + self.timeseries[subitem][self._slice_helper(subitem)] = value else: raise ValueError( f"Dont know how to index with {key}, must be an int, slice, or iterable" @@ -898,3 +907,6 @@ if "pytest" in sys.modules: """DynamicTableRegion subclass for testing""" pass + + class TimeSeriesReferenceVectorData(TimeSeriesReferenceVectorDataMixin): + pass diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index fb9a3e2..6c5d51a 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -611,6 +611,35 @@ def test_mixed_aligned_dynamictable(aligned_table): assert len(array) == index_array[i] +def test_timeseriesreferencevectordata_index(): + """ + TimeSeriesReferenceVectorData should be able to do the thing it does + """ + generator = np.random.default_rng() + timeseries = np.array([np.arange(100)] * 10) + + counts = generator.integers(1, 10, (10,)) + idx_start = np.arange(0, 100, 10) + + response = hdmf.TimeSeriesReferenceVectorData( + idx_start=idx_start, + count=counts, + timeseries=timeseries, + ) + for i in range(len(counts)): + assert len(response[i]) == counts[i] + items = response[3:5] + assert all(items[0] == timeseries[3][idx_start[3] : idx_start[3] + counts[3]]) + assert all(items[1] == timeseries[4][idx_start[4] : idx_start[4] + counts[4]]) + + response[0] = np.zeros((counts[0],)) + assert all(response[0] == 0) + + response[1:3] = [np.zeros((counts[1],)), np.ones((counts[2],))] + assert all(response[1] == 0) + assert all(response[2] == 1) + + # -------------------------------------------------- # Model-based tests # -------------------------------------------------- @@ -623,7 +652,6 @@ def test_dynamictable_indexing_electricalseries(electrical_series): series, electrodes = electrical_series colnames = [ - "id", "x", "y", "group", @@ -632,20 +660,19 @@ def test_dynamictable_indexing_electricalseries(electrical_series): "extra_column", ] dtypes = [ - np.dtype("int64"), np.dtype("float64"), np.dtype("float64"), ] + ([np.dtype("O")] * 4) row = electrodes[0] # successfully get a single row :) - assert row.shape == (1, 7) + assert row.shape == (1, 6) assert row.dtypes.values.tolist() == dtypes assert row.columns.tolist() == colnames # slice a range of rows rows = electrodes[0:3] - assert rows.shape == (3, 7) + assert rows.shape == (3, 6) assert rows.dtypes.values.tolist() == dtypes assert rows.columns.tolist() == colnames @@ -656,7 +683,7 @@ def test_dynamictable_indexing_electricalseries(electrical_series): # get a single cell val = electrodes[0, "y"] assert val == 5 - val = electrodes[0, 2] + val = electrodes[0, 1] assert val == 5 # get a slice of rows and columns @@ 
-698,8 +725,8 @@ def test_dynamictable_region_basic_electricalseries(electrical_series): # b) every other object in the chain is strictly validated, # so we assume if we got a right shaped df that it is the correct one. # feel free to @ me when i am wrong about this - assert all(row.id == 4) - assert row.shape == (1, 7) + assert all(row.index == 4) + assert row.shape == (1, 6) # and we should still be preserving the model that is the contents of the cell of this row # so this is a dataframe row with a column "group" that contains an array of ElectrodeGroup # objects and that's as far as we are going to chase the recursion in this basic indexing test @@ -709,7 +736,7 @@ def test_dynamictable_region_basic_electricalseries(electrical_series): # getting a list of table rows is actually correct behavior here because # this list of table rows is actually the cell of another table rows = series.electrodes[0:3] - assert all([all(row.id == idx) for row, idx in zip(rows, [4, 3, 2])]) + assert all([all(row.index == idx) for row, idx in zip(rows, [4, 3, 2])]) def test_aligned_dynamictable_ictable(intracellular_recordings_table): From f5a41734948c0ff2de4fb3585290d77d3b792f13 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 15 Aug 2024 01:47:55 -0700 Subject: [PATCH 59/61] update models --- .../pydantic/core/v2_4_0/core_nwb_base.py | 39 ++++++++++++------- .../pydantic/core/v2_5_0/core_nwb_base.py | 39 ++++++++++++------- .../core/v2_6_0_alpha/core_nwb_base.py | 39 ++++++++++++------- .../pydantic/core/v2_7_0/core_nwb_base.py | 39 ++++++++++++------- 4 files changed, 96 insertions(+), 60 deletions(-) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py index 2328c2c..9ec8413 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py @@ -134,9 +134,9 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): for index, span, and timeseries """ - idx_start: NDArray[Any, int] - count: NDArray[Any, int] - timeseries: NDArray[Any, BaseModel] + idx_start: NDArray[Shape["*"], int] + count: NDArray[Shape["*"], int] + timeseries: NDArray @model_validator(mode="after") def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin": @@ -175,11 +175,11 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): ) if isinstance(item, (int, np.integer)): - return self.timeseries[self._slice_helper(item)] - elif isinstance(item, slice): - return [self.timeseries[subitem] for subitem in self._slice_helper(item)] - elif isinstance(item, Iterable): - return [self.timeseries[self._slice_helper(subitem)] for subitem in item] + return self.timeseries[item][self._slice_helper(item)] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.idx_start))) + return [self.timeseries[subitem][self._slice_helper(subitem)] for subitem in item] else: raise ValueError( f"Dont know how to index with {item}, must be an int, slice, or iterable" @@ -192,13 +192,22 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): " never done in the core schema." 
) if isinstance(key, (int, np.integer)): - self.timeseries[self._slice_helper(key)] = value - elif isinstance(key, slice): - for subitem in self._slice_helper(key): - self.timeseries[subitem] = value - elif isinstance(key, Iterable): - for subitem in key: - self.timeseries[self._slice_helper(subitem)] = value + self.timeseries[key][self._slice_helper(key)] = value + elif isinstance(key, (slice, Iterable)): + if isinstance(key, slice): + key = range(*key.indices(len(self.idx_start))) + + if isinstance(value, Iterable): + if len(key) != len(value): + raise ValueError( + "Can only assign equal-length iterable to a slice, manually index the" + " target Timeseries object if you need more control" + ) + for subitem, subvalue in zip(key, value): + self.timeseries[subitem][self._slice_helper(subitem)] = subvalue + else: + for subitem in key: + self.timeseries[subitem][self._slice_helper(subitem)] = value else: raise ValueError( f"Dont know how to index with {key}, must be an int, slice, or iterable" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py index d080acf..aefea7a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py @@ -145,9 +145,9 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): for index, span, and timeseries """ - idx_start: NDArray[Any, int] - count: NDArray[Any, int] - timeseries: NDArray[Any, BaseModel] + idx_start: NDArray[Shape["*"], int] + count: NDArray[Shape["*"], int] + timeseries: NDArray @model_validator(mode="after") def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin": @@ -186,11 +186,11 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): ) if isinstance(item, (int, np.integer)): - return self.timeseries[self._slice_helper(item)] - elif isinstance(item, slice): - return [self.timeseries[subitem] for subitem in self._slice_helper(item)] - elif isinstance(item, Iterable): - return [self.timeseries[self._slice_helper(subitem)] for subitem in item] + return self.timeseries[item][self._slice_helper(item)] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.idx_start))) + return [self.timeseries[subitem][self._slice_helper(subitem)] for subitem in item] else: raise ValueError( f"Dont know how to index with {item}, must be an int, slice, or iterable" @@ -203,13 +203,22 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): " never done in the core schema." 
) if isinstance(key, (int, np.integer)): - self.timeseries[self._slice_helper(key)] = value - elif isinstance(key, slice): - for subitem in self._slice_helper(key): - self.timeseries[subitem] = value - elif isinstance(key, Iterable): - for subitem in key: - self.timeseries[self._slice_helper(subitem)] = value + self.timeseries[key][self._slice_helper(key)] = value + elif isinstance(key, (slice, Iterable)): + if isinstance(key, slice): + key = range(*key.indices(len(self.idx_start))) + + if isinstance(value, Iterable): + if len(key) != len(value): + raise ValueError( + "Can only assign equal-length iterable to a slice, manually index the" + " target Timeseries object if you need more control" + ) + for subitem, subvalue in zip(key, value): + self.timeseries[subitem][self._slice_helper(subitem)] = subvalue + else: + for subitem in key: + self.timeseries[subitem][self._slice_helper(subitem)] = value else: raise ValueError( f"Dont know how to index with {key}, must be an int, slice, or iterable" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py index 26677a0..624853e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py @@ -145,9 +145,9 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): for index, span, and timeseries """ - idx_start: NDArray[Any, int] - count: NDArray[Any, int] - timeseries: NDArray[Any, BaseModel] + idx_start: NDArray[Shape["*"], int] + count: NDArray[Shape["*"], int] + timeseries: NDArray @model_validator(mode="after") def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin": @@ -186,11 +186,11 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): ) if isinstance(item, (int, np.integer)): - return self.timeseries[self._slice_helper(item)] - elif isinstance(item, slice): - return [self.timeseries[subitem] for subitem in self._slice_helper(item)] - elif isinstance(item, Iterable): - return [self.timeseries[self._slice_helper(subitem)] for subitem in item] + return self.timeseries[item][self._slice_helper(item)] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.idx_start))) + return [self.timeseries[subitem][self._slice_helper(subitem)] for subitem in item] else: raise ValueError( f"Dont know how to index with {item}, must be an int, slice, or iterable" @@ -203,13 +203,22 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): " never done in the core schema." 
) if isinstance(key, (int, np.integer)): - self.timeseries[self._slice_helper(key)] = value - elif isinstance(key, slice): - for subitem in self._slice_helper(key): - self.timeseries[subitem] = value - elif isinstance(key, Iterable): - for subitem in key: - self.timeseries[self._slice_helper(subitem)] = value + self.timeseries[key][self._slice_helper(key)] = value + elif isinstance(key, (slice, Iterable)): + if isinstance(key, slice): + key = range(*key.indices(len(self.idx_start))) + + if isinstance(value, Iterable): + if len(key) != len(value): + raise ValueError( + "Can only assign equal-length iterable to a slice, manually index the" + " target Timeseries object if you need more control" + ) + for subitem, subvalue in zip(key, value): + self.timeseries[subitem][self._slice_helper(subitem)] = subvalue + else: + for subitem in key: + self.timeseries[subitem][self._slice_helper(subitem)] = value else: raise ValueError( f"Dont know how to index with {key}, must be an int, slice, or iterable" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py index 096733f..09a6f8b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py @@ -145,9 +145,9 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): for index, span, and timeseries """ - idx_start: NDArray[Any, int] - count: NDArray[Any, int] - timeseries: NDArray[Any, BaseModel] + idx_start: NDArray[Shape["*"], int] + count: NDArray[Shape["*"], int] + timeseries: NDArray @model_validator(mode="after") def ensure_equal_length(self) -> "TimeSeriesReferenceVectorDataMixin": @@ -186,11 +186,11 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): ) if isinstance(item, (int, np.integer)): - return self.timeseries[self._slice_helper(item)] - elif isinstance(item, slice): - return [self.timeseries[subitem] for subitem in self._slice_helper(item)] - elif isinstance(item, Iterable): - return [self.timeseries[self._slice_helper(subitem)] for subitem in item] + return self.timeseries[item][self._slice_helper(item)] + elif isinstance(item, (slice, Iterable)): + if isinstance(item, slice): + item = range(*item.indices(len(self.idx_start))) + return [self.timeseries[subitem][self._slice_helper(subitem)] for subitem in item] else: raise ValueError( f"Dont know how to index with {item}, must be an int, slice, or iterable" @@ -203,13 +203,22 @@ class TimeSeriesReferenceVectorDataMixin(VectorDataMixin): " never done in the core schema." 
) if isinstance(key, (int, np.integer)): - self.timeseries[self._slice_helper(key)] = value - elif isinstance(key, slice): - for subitem in self._slice_helper(key): - self.timeseries[subitem] = value - elif isinstance(key, Iterable): - for subitem in key: - self.timeseries[self._slice_helper(subitem)] = value + self.timeseries[key][self._slice_helper(key)] = value + elif isinstance(key, (slice, Iterable)): + if isinstance(key, slice): + key = range(*key.indices(len(self.idx_start))) + + if isinstance(value, Iterable): + if len(key) != len(value): + raise ValueError( + "Can only assign equal-length iterable to a slice, manually index the" + " target Timeseries object if you need more control" + ) + for subitem, subvalue in zip(key, value): + self.timeseries[subitem][self._slice_helper(subitem)] = subvalue + else: + for subitem in key: + self.timeseries[subitem][self._slice_helper(subitem)] = value else: raise ValueError( f"Dont know how to index with {key}, must be an int, slice, or iterable" From 3cda6c3e7c51a58da1249591ceb9a36a60656cdd Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 15 Aug 2024 01:48:34 -0700 Subject: [PATCH 60/61] lint --- nwb_linkml/src/nwb_linkml/includes/hdmf.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nwb_linkml/src/nwb_linkml/includes/hdmf.py b/nwb_linkml/src/nwb_linkml/includes/hdmf.py index 7fa3c08..19bbf34 100644 --- a/nwb_linkml/src/nwb_linkml/includes/hdmf.py +++ b/nwb_linkml/src/nwb_linkml/includes/hdmf.py @@ -909,4 +909,6 @@ if "pytest" in sys.modules: pass class TimeSeriesReferenceVectorData(TimeSeriesReferenceVectorDataMixin): + """TimeSeriesReferenceVectorData subclass for testing""" + pass From ec7a8254fe94ac0511f80804a9fc1e47edb0ea63 Mon Sep 17 00:00:00 2001 From: sneakers-the-rat Date: Thu, 15 Aug 2024 01:55:15 -0700 Subject: [PATCH 61/61] fix tsrv test --- nwb_linkml/tests/test_includes/test_hdmf.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/nwb_linkml/tests/test_includes/test_hdmf.py b/nwb_linkml/tests/test_includes/test_hdmf.py index 6c5d51a..cb2b974 100644 --- a/nwb_linkml/tests/test_includes/test_hdmf.py +++ b/nwb_linkml/tests/test_includes/test_hdmf.py @@ -17,7 +17,6 @@ from nwb_linkml.includes.hdmf import ( # FIXME: Make this just be the output of the provider by patching into import machinery from nwb_linkml.models.pydantic.core.v2_7_0.namespace import ( ElectrodeGroup, - VoltageClampStimulusSeries, ) from .conftest import _ragged_array @@ -769,7 +768,6 @@ def test_aligned_dynamictable_ictable(intracellular_recordings_table): # also tested separately # each individual cell should be an array of VoltageClampStimulusSeries... # and then we should be able to index within that as well - stims = rows["stimuli", "stimulus"][0] + stims = rows["stimuli", "stimulus"] for i in range(len(stims)): - assert isinstance(stims[i], VoltageClampStimulusSeries) - assert all([i == val for val in stims[i][:]]) + assert all(np.array(stims[i]) == i)
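Patches 58 through 61 settle the indexing contract for `TimeSeriesReferenceVectorData`: row `i` of the reference column resolves to `timeseries[i][idx_start[i] : idx_start[i] + count[i]]`, which is what the tests above assert. A minimal numpy-only sketch of that triplet lookup, using stand-in arrays rather than the pydantic models:

```python
# Sketch of the (idx_start, count, timeseries) lookup implemented by
# TimeSeriesReferenceVectorDataMixin.__getitem__ after this patch series.
import numpy as np

timeseries = np.array([np.arange(100)] * 10)  # 10 referenced series
idx_start = np.arange(0, 100, 10)             # where each span starts
count = np.full(10, 5)                        # how long each span is

def resolve(i: int) -> np.ndarray:
    # row i selects a span *within* the i-th referenced series
    return timeseries[i][idx_start[i] : idx_start[i] + count[i]]

span = resolve(3)
assert len(span) == count[3]
assert span[0] == idx_start[3]
print(span)  # [30 31 32 33 34]
```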