diff --git a/nwb_linkml/src/nwb_linkml/models/__init__.py b/nwb_linkml/src/nwb_linkml/models/__init__.py index 586e382..97cb846 100644 --- a/nwb_linkml/src/nwb_linkml/models/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/__init__.py @@ -1 +1 @@ -from .pydantic.core.v2_6_0_alpha.namespace import * +from .pydantic.core.v2_7_0.namespace import * \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py index 41ab18e..0766284 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_base.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + 
List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -28,6 +55,7 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_0.hdmf_common_table import Container, Data, DynamicTable + metamodel_version = "None" version = "2.2.0" @@ -46,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -84,7 +112,9 @@ class Image(NWBData): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -119,7 +149,9 @@ class TimeSeries(NWBDataInterface): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -222,7 +254,7 @@ class Images(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Images") description: Optional[str] = Field( None, description="""Description of this collection of images.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py index 11e641e..4510297 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_behavior.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -28,10 +55,14 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBDataInterface, + TimeSeriesStartingTime, + TimeSeriesSync, TimeSeries, ) + from .core_nwb_misc import IntervalSeries + metamodel_version = "None" version = "2.2.0" @@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel): object_id: 
Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries): None, description="""Description defining what exactly 'straight-ahead' means.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -136,7 +169,9 @@ class BehavioralEpochs(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict) + children: Optional[List[IntervalSeries] | IntervalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -176,7 +211,9 @@ class EyeTracking(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -186,7 +223,9 @@ class CompassDirection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
@@ -196,7 +235,9 @@ class Position(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_device.py index 4da43f0..6c7d3f7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_device.py @@ -1,26 +1,61 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np from .core_nwb_base import NWBContainer + metamodel_version = "None" version = "2.2.0" @@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, 
"array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py index 98ff0bf..9723157 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ecephys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,13 +53,17 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTableRegion +from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTable, DynamicTableRegion + from .core_nwb_base import ( - NWBContainer, + TimeSeriesStartingTime, NWBDataInterface, + TimeSeriesSync, TimeSeries, + NWBContainer, ) + metamodel_version = "None" version = "2.2.0" @@ -51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -90,7 +121,9 @@ class 
ElectricalSeries(TimeSeries): None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -156,7 +189,9 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -185,14 +220,16 @@ class FeatureExtraction(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("FeatureExtraction") description: NDArray[Shape["* num_features"], str] = Field( ..., description="""Description of features (eg, ''PC1'') for each of the extracted features.""", ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( - ..., - description="""Multi-dimensional array of features extracted from each event.""", + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = ( + Field( + ..., + description="""Multi-dimensional array of features extracted from each event.""", + ) ) times: NDArray[Shape["* num_events"], float] = Field( ..., @@ -226,7 +263,7 @@ class EventDetection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("EventDetection") detection_method: str = Field( ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", @@ -246,7 +283,9 @@ class EventWaveform(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict) + children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -256,7 +295,9 @@ class FilteredEphys(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -266,7 +307,9 @@ class LFP(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -277,23 +320,37 @@ class ElectrodeGroup(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") + description: Optional[str] = Field( + None, description="""Description of this electrode group.""" + ) location: Optional[str] = Field( None, description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible.""", ) - position: Optional[Any] = Field( + position: Optional[str] = Field( None, description="""stereotaxic or common framework coordinates""" ) +class ElectrodeGroupPosition(ConfiguredBaseModel): + """ + stereotaxic or common framework coordinates + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["position"] = Field("position") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") + + class ClusterWaveforms(NWBDataInterface): """ DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ClusterWaveforms") waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) @@ -313,7 +370,7 @@ class Clustering(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Clustering") description: str = Field( ..., description="""Description of clusters or clustering, (e.g. 
cluster 0 is noise, clusters curated using Klusters, etc)""", @@ -343,5 +400,6 @@ EventWaveform.model_rebuild() FilteredEphys.model_rebuild() LFP.model_rebuild() ElectrodeGroup.model_rebuild() +ElectrodeGroupPosition.model_rebuild() ClusterWaveforms.model_rebuild() Clustering.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py index 35d4c9b..8586b33 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_epoch.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,10 +54,14 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_0.hdmf_common_table import ( + VectorData, DynamicTable, VectorIndex, ) +from .core_nwb_base import TimeSeries + + metamodel_version = "None" version = "2.2.0" @@ -48,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return 
self.array[i] else: @@ -85,10 +117,12 @@ class TimeIntervals(DynamicTable): description="""User-defined tags that identify or categorize events.""", ) tags_index: Optional[str] = Field(None, description="""Index for tags.""") - timeseries: Optional[List[Any] | Any] = Field( - default_factory=list, description="""An index into a TimeSeries object.""" + timeseries: Optional[str] = Field( + None, description="""An index into a TimeSeries object.""" + ) + timeseries_index: Optional[str] = Field( + None, description="""Index for timeseries.""" ) - timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""") colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -122,6 +156,29 @@ class TimeIntervalsTagsIndex(VectorIndex): ) +class TimeIntervalsTimeseries(VectorData): + """ + An index into a TimeSeries object. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["timeseries"] = Field("timeseries") + idx_start: Optional[int] = Field( + None, + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", + ) + count: Optional[int] = Field( + None, + description="""Number of data samples available in this time series, during this epoch.""", + ) + timeseries: Optional[str] = Field( + None, description="""the TimeSeries that this index applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + + class TimeIntervalsTimeseriesIndex(VectorIndex): """ Index for timeseries. 
@@ -139,4 +196,5 @@ class TimeIntervalsTimeseriesIndex(VectorIndex): # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model TimeIntervals.model_rebuild() TimeIntervalsTagsIndex.model_rebuild() +TimeIntervalsTimeseries.model_rebuild() TimeIntervalsTimeseriesIndex.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py index f501bd9..863c4e2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_file.py @@ -1,23 +1,49 @@ from __future__ import annotations - -import sys -from datetime import datetime +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,14 +53,28 @@ if TYPE_CHECKING: import numpy as np +from .core_nwb_misc import Units + +from .core_nwb_device import Device + +from .core_nwb_ecephys import ElectrodeGroup + +from .core_nwb_epoch import TimeIntervals + +from .core_nwb_base import NWBContainer, TimeSeries, ProcessingModule, NWBDataInterface + +from .core_nwb_ophys import ImagingPlane + +from .core_nwb_icephys import SweepTable, IntracellularElectrode + from ...hdmf_common.v1_1_0.hdmf_common_table 
import ( + VectorIndex, + VectorData, DynamicTable, ) -from .core_nwb_base import NWBContainer, NWBDataInterface, ProcessingModule, TimeSeries -from .core_nwb_device import Device -from .core_nwb_epoch import TimeIntervals + from .core_nwb_ogen import OptogeneticStimulusSite -from .core_nwb_ophys import ImagingPlane + metamodel_version = "None" version = "2.2.0" @@ -54,7 +94,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -183,7 +223,9 @@ class NWBFileGeneral(ConfiguredBaseModel): keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field( None, description="""Terms to search over.""" ) - lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""") + lab: Optional[str] = Field( + None, description="""Laboratory where experiment was performed.""" + ) notes: Optional[str] = Field(None, description="""Notes about the experiment.""") pharmacology: Optional[str] = Field( None, @@ -196,7 +238,9 @@ class NWBFileGeneral(ConfiguredBaseModel): related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field( None, description="""Publication information. 
PMID, DOI, URL, etc.""" ) - session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""") + session_id: Optional[str] = Field( + None, description="""Lab-specific ID for the session.""" + ) slices: Optional[str] = Field( None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", @@ -235,9 +279,11 @@ class NWBFileGeneral(ConfiguredBaseModel): intracellular_ephys: Optional[str] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) - optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field( - default_factory=dict, - description="""Metadata describing optogenetic stimuluation.""", + optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = ( + Field( + default_factory=dict, + description="""Metadata describing optogenetic stimuluation.""", + ) ) optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field( default_factory=dict, description="""Metadata related to optophysiology.""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py index e473ffc..8b4e86b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_icephys.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + 
UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -25,15 +53,20 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_0.hdmf_common_table import ( - DynamicTable, - VectorIndex, -) from .core_nwb_base import ( + TimeSeriesStartingTime, NWBContainer, TimeSeries, + TimeSeriesSync, ) +from ...hdmf_common.v1_1_0.hdmf_common_table import ( + VectorIndex, + VectorData, + DynamicTable, +) + + metamodel_version = "None" version = "2.2.0" @@ -52,7 +85,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -90,7 +123,9 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -139,8 +174,12 @@ class CurrentClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
data: str = Field(..., description="""Recorded voltage.""") - bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + bias_current: Optional[float] = Field( + None, description="""Bias current, in amps.""" + ) + bridge_balance: Optional[float] = Field( + None, description="""Bridge balance, in ohms.""" + ) capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) @@ -155,7 +194,9 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -203,8 +244,12 @@ class IZeroClampSeries(CurrentClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + bias_current: float = Field( + ..., description="""Bias current, in amps, fixed to 0.0.""" + ) + bridge_balance: float = Field( + ..., description="""Bridge balance, in ohms, fixed to 0.0.""" + ) capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) @@ -220,7 +265,9 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -266,7 +313,9 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -315,8 +364,12 @@ class VoltageClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
data: str = Field(..., description="""Recorded current.""") - capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""") - capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""") + capacitance_fast: Optional[str] = Field( + None, description="""Fast capacitance, in farads.""" + ) + capacitance_slow: Optional[str] = Field( + None, description="""Slow capacitance, in farads.""" + ) resistance_comp_bandwidth: Optional[str] = Field( None, description="""Resistance compensation bandwidth, in hertz.""" ) @@ -343,7 +396,9 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -474,7 +529,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp") + name: Literal["whole_cell_series_resistance_comp"] = Field( + "whole_cell_series_resistance_comp" + ) unit: Optional[str] = Field( None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", @@ -501,7 +558,9 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -553,7 +612,9 @@ class IntracellularElectrode(NWBContainer): ..., description="""Description of electrode (e.g., whole-cell, sharp, etc.).""", ) - filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""") + filtering: Optional[str] = Field( + None, description="""Electrode specific filtering.""" + ) initial_access_resistance: Optional[str] = Field( None, description="""Initial access resistance.""" ) @@ -561,8 +622,12 @@ class IntracellularElectrode(NWBContainer): None, description="""Location of the electrode. 
Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", ) - resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""") - seal: Optional[str] = Field(None, description="""Information about seal used for recording.""") + resistance: Optional[str] = Field( + None, description="""Electrode resistance, in ohms.""" + ) + seal: Optional[str] = Field( + None, description="""Information about seal used for recording.""" + ) slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py index e1427de..bc18173 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_image.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -25,7 +53,8 @@ if TYPE_CHECKING: import numpy as np -from .core_nwb_base import Image, TimeSeries +from .core_nwb_base import 
TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries + metamodel_version = "None" version = "2.2.0" @@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -74,7 +103,9 @@ class GrayscaleImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -94,7 +125,9 @@ class RGBImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -114,7 +147,9 @@ class RGBAImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -148,7 +183,9 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -213,7 +250,9 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -280,7 +319,9 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -317,7 +358,9 @@ class IndexSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the frame in the referenced ImageSeries.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_language.py index 4963581..f3b68d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_language.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_language.py @@ -1,20 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel): tree_root: bool = False -class Arraylike(ConfiguredBaseModel): - """ - Container for arraylike information held in the dims, shape, and dtype properties.this 
is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. - """ - - linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - - # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Arraylike.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py index a44d9f6..c1b8fec 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_misc.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,12 +54,16 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_0.hdmf_common_table import ( + VectorData, DynamicTable, DynamicTableRegion, - VectorData, VectorIndex, ) -from .core_nwb_base import TimeSeries + +from .core_nwb_base import 
TimeSeriesStartingTime, TimeSeriesSync, TimeSeries + +from .core_nwb_ecephys import ElectrodeGroup + metamodel_version = "None" version = "2.2.0" @@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries): ..., description="""Description of the features represented in TimeSeries::data.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries): data: NDArray[Shape["* num_times"], str] = Field( ..., description="""Annotations made during an experiment.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -214,12 +251,16 @@ class DecompositionSeries(TimeSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Data decomposed into frequency bands.""") - metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""") + metric: str = Field( + ..., description="""The metric used, e.g. phase, amplitude, power.""" + ) bands: str = Field( ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -257,7 +298,9 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None) + array: Optional[ + NDArray[Shape["* num_times, * num_channels, * num_bands"], float] + ] = Field(None) class DecompositionSeriesBands(DynamicTable): @@ -306,18 +349,22 @@ class Units(DynamicTable): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Units") spike_times_index: Optional[str] = Field( None, description="""Index into the spike_times dataset.""" ) - spike_times: Optional[str] = Field(None, description="""Spike times for each unit.""") + spike_times: Optional[str] = Field( + None, description="""Spike times for each unit.""" + ) obs_intervals_index: Optional[str] = Field( None, description="""Index into the obs_intervals dataset.""" ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field( - None, description="""Observation intervals for each unit.""" + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = ( + Field(None, description="""Observation intervals for each unit.""") + ) + electrodes_index: Optional[str] = Field( + None, description="""Index into electrodes.""" ) - electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""") electrodes: Optional[str] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", @@ -337,7 +384,9 @@ class Units(DynamicTable): 
NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] - ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") + ] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py index 2619596..910f69b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ogen.py @@ -1,25 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -27,8 +56,11 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBContainer, TimeSeries, + TimeSeriesStartingTime, + TimeSeriesSync, ) + metamodel_version = "None" version = "2.2.0" @@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel): 
object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries): data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field( + ..., description="""Excitation wavelength, in nm.""" + ) location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py index b6ca2cd..c3d498f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_ophys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,13 +53,18 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTable, DynamicTableRegion +from .core_nwb_image import ImageSeriesExternalFile, ImageSeries + from .core_nwb_base import ( - NWBContainer, + TimeSeriesStartingTime, + TimeSeriesSync, NWBDataInterface, TimeSeries, + NWBContainer, ) -from .core_nwb_image import ImageSeries + +from ...hdmf_common.v1_1_0.hdmf_common_table import DynamicTable, DynamicTableRegion + metamodel_version = "None" version = "2.2.0" @@ -52,7 +84,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def 
__getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -109,7 +141,9 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -151,7 +185,9 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -200,7 +236,9 @@ class DfOverF(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -210,7 +248,9 @@ class Fluorescence(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -220,9 +260,9 @@ class ImageSegmentation(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable]] = ( - Field(default_factory=dict) - ) + children: Optional[ + List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable] + ] = Field(default_factory=dict) name: str = Field(...) @@ -233,9 +273,15 @@ class ImagingPlane(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- description: Optional[str] = Field(None, description="""Description of the imaging plane.""") - excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") - imaging_rate: float = Field(..., description="""Rate that images are acquired, in Hz.""") + description: Optional[str] = Field( + None, description="""Description of the imaging plane.""" + ) + excitation_lambda: float = Field( + ..., description="""Excitation wavelength, in nm.""" + ) + imaging_rate: float = Field( + ..., description="""Rate that images are acquired, in Hz.""" + ) indicator: str = Field(..., description="""Calcium indicator.""") location: str = Field( ..., @@ -320,8 +366,12 @@ class OpticalChannel(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) name: str = Field(...) - description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") + description: str = Field( + ..., description="""Description or other notes about the channel.""" + ) + emission_lambda: float = Field( + ..., description="""Emission wavelength for channel, in nm.""" + ) class MotionCorrection(NWBDataInterface): @@ -330,7 +380,9 @@ class MotionCorrection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[NWBDataInterface] | NWBDataInterface] = Field(default_factory=dict) + children: Optional[List[NWBDataInterface] | NWBDataInterface] = Field( + default_factory=dict + ) name: str = Field(...) 
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py index 5fc58ca..0320789 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/core_nwb_retinotopy.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,8 +54,10 @@ if TYPE_CHECKING: from .core_nwb_base import NWBData, NWBDataInterface + from .core_nwb_image import GrayscaleImage + metamodel_version = "None" version = "2.2.0" @@ -46,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -76,7 +106,9 @@ class RetinotopyMap(NWBData): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) @@ -95,7 +127,9 @@ class AxisMap(RetinotopyMap): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) class RetinotopyImage(GrayscaleImage): @@ -113,14 +147,18 @@ class RetinotopyImage(GrayscaleImage): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -136,7 +174,7 @@ class ImagingRetinotopy(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("ImagingRetinotopy") axis_1_phase_map: str = Field( ..., description="""Phase response to stimulus on the first measured axis.""" ) @@ -183,7 +221,9 @@ class ImagingRetinotopyAxis1PhaseMap(AxisMap): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) class ImagingRetinotopyAxis1PowerMap(AxisMap): @@ -201,7 +241,9 @@ class ImagingRetinotopyAxis1PowerMap(AxisMap): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) class ImagingRetinotopyAxis2PhaseMap(AxisMap): @@ -219,7 +261,9 @@ class ImagingRetinotopyAxis2PhaseMap(AxisMap): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) class ImagingRetinotopyAxis2PowerMap(AxisMap): @@ -237,7 +281,9 @@ class ImagingRetinotopyAxis2PowerMap(AxisMap): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) class ImagingRetinotopySignMap(RetinotopyMap): @@ -251,7 +297,9 @@ class ImagingRetinotopySignMap(RetinotopyMap): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) @@ -262,7 +310,9 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) name: Literal["focal_depth_image"] = Field("focal_depth_image") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") + focal_depth: Optional[float] = Field( + None, description="""Focal depth offset, in meters.""" + ) bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", @@ -271,14 +321,18 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. 
Right now only 'raw' is supported.""" ) resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -303,14 +357,18 @@ class ImagingRetinotopyVasculatureImage(RetinotopyImage): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py index 33c29fc..70335e7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_0/namespace.py @@ -1,23 +1,220 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic 
import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np +from ...hdmf_common.v1_1_0.hdmf_common_sparse import ( + CSRMatrix, + CSRMatrixIndices, + CSRMatrixIndptr, + CSRMatrixData, +) + +from ...hdmf_common.v1_1_0.hdmf_common_table import ( + Data, + Index, + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + Container, + DynamicTable, +) + +from .core_nwb_retinotopy import ( + RetinotopyMap, + AxisMap, + RetinotopyImage, + ImagingRetinotopy, + ImagingRetinotopyAxis1PhaseMap, + ImagingRetinotopyAxis1PowerMap, + ImagingRetinotopyAxis2PhaseMap, + ImagingRetinotopyAxis2PowerMap, + ImagingRetinotopySignMap, + ImagingRetinotopyFocalDepthImage, + ImagingRetinotopyVasculatureImage, +) + +from .core_nwb_image import ( + GrayscaleImage, + RGBImage, + RGBAImage, + ImageSeries, + ImageSeriesExternalFile, + ImageMaskSeries, + OpticalSeries, + IndexSeries, +) + +from .core_nwb_base import ( + NWBData, + Image, + NWBContainer, + NWBDataInterface, + TimeSeries, + TimeSeriesData, + TimeSeriesStartingTime, + TimeSeriesSync, + ProcessingModule, + Images, +) + +from .core_nwb_ophys import ( + TwoPhotonSeries, + RoiResponseSeries, + RoiResponseSeriesRois, + DfOverF, + Fluorescence, + ImageSegmentation, + ImagingPlane, + ImagingPlaneManifold, + ImagingPlaneOriginCoords, + ImagingPlaneGridSpacing, + OpticalChannel, + MotionCorrection, +) + +from .core_nwb_device import Device + +from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite + +from .core_nwb_icephys import ( 
+ PatchClampSeries, + PatchClampSeriesData, + CurrentClampSeries, + CurrentClampSeriesData, + IZeroClampSeries, + CurrentClampStimulusSeries, + CurrentClampStimulusSeriesData, + VoltageClampSeries, + VoltageClampSeriesData, + VoltageClampSeriesCapacitanceFast, + VoltageClampSeriesCapacitanceSlow, + VoltageClampSeriesResistanceCompBandwidth, + VoltageClampSeriesResistanceCompCorrection, + VoltageClampSeriesResistanceCompPrediction, + VoltageClampSeriesWholeCellCapacitanceComp, + VoltageClampSeriesWholeCellSeriesResistanceComp, + VoltageClampStimulusSeries, + VoltageClampStimulusSeriesData, + IntracellularElectrode, + SweepTable, + SweepTableSeriesIndex, +) + +from .core_nwb_ecephys import ( + ElectricalSeries, + ElectricalSeriesElectrodes, + SpikeEventSeries, + FeatureExtraction, + FeatureExtractionElectrodes, + EventDetection, + EventWaveform, + FilteredEphys, + LFP, + ElectrodeGroup, + ElectrodeGroupPosition, + ClusterWaveforms, + Clustering, +) + +from .core_nwb_behavior import ( + SpatialSeries, + SpatialSeriesData, + BehavioralEpochs, + BehavioralEvents, + BehavioralTimeSeries, + PupilTracking, + EyeTracking, + CompassDirection, + Position, +) + +from .core_nwb_misc import ( + AbstractFeatureSeries, + AbstractFeatureSeriesData, + AnnotationSeries, + IntervalSeries, + DecompositionSeries, + DecompositionSeriesData, + DecompositionSeriesBands, + Units, + UnitsSpikeTimesIndex, + UnitsSpikeTimes, + UnitsObsIntervalsIndex, + UnitsElectrodesIndex, + UnitsElectrodes, +) + +from .core_nwb_file import ( + NWBFile, + NWBFileStimulus, + NWBFileGeneral, + NWBFileGeneralSourceScript, + Subject, + NWBFileGeneralExtracellularEphys, + NWBFileGeneralExtracellularEphysElectrodes, + NWBFileGeneralIntracellularEphys, +) + +from .core_nwb_epoch import ( + TimeIntervals, + TimeIntervalsTagsIndex, + TimeIntervalsTimeseries, + TimeIntervalsTimeseriesIndex, +) + + metamodel_version = "None" version = "2.2.0" @@ -36,7 +233,7 @@ class ConfiguredBaseModel(BaseModel): object_id: 
Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py index 5b60aa7..b7471a3 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_base.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -28,6 +55,7 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_2.hdmf_common_table import Container, Data, DynamicTable + metamodel_version = "None" version = "2.2.1" @@ -46,7 +74,7 @@ class 
ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -84,7 +112,9 @@ class Image(NWBData): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -119,7 +149,9 @@ class TimeSeries(NWBDataInterface): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -222,7 +254,7 @@ class Images(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("Images") description: Optional[str] = Field( None, description="""Description of this collection of images.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py index 10138f0..3ed879b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_behavior.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -28,10 +55,14 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBDataInterface, + TimeSeriesStartingTime, + TimeSeriesSync, TimeSeries, ) + from .core_nwb_misc import IntervalSeries + metamodel_version = "None" version = "2.2.1" @@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries): None, description="""Description defining what exactly 
'straight-ahead' means.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -136,7 +169,9 @@ class BehavioralEpochs(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict) + children: Optional[List[IntervalSeries] | IntervalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -176,7 +211,9 @@ class EyeTracking(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -186,7 +223,9 @@ class CompassDirection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -196,7 +235,9 @@ class Position(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_device.py index c43fdf4..f37190a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_device.py @@ -1,26 +1,61 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np from .core_nwb_base import NWBContainer + metamodel_version = "None" version = "2.2.1" @@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py index 0160620..99fea00 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py +++ 
b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ecephys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,13 +53,17 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTableRegion +from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTable, DynamicTableRegion + from .core_nwb_base import ( - NWBContainer, + TimeSeriesStartingTime, NWBDataInterface, + TimeSeriesSync, TimeSeries, + NWBContainer, ) + metamodel_version = "None" version = "2.2.1" @@ -51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -90,7 +121,9 @@ class ElectricalSeries(TimeSeries): None, description="""Channel-specific conversion factor. 
Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -156,7 +189,9 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -185,14 +220,16 @@ class FeatureExtraction(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("FeatureExtraction") description: NDArray[Shape["* num_features"], str] = Field( ..., description="""Description of features (eg, ''PC1'') for each of the extracted features.""", ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( - ..., - description="""Multi-dimensional array of features extracted from each event.""", + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = ( + Field( + ..., + description="""Multi-dimensional array of features extracted from each event.""", + ) ) times: NDArray[Shape["* num_events"], float] = Field( ..., @@ -226,7 +263,7 @@ class EventDetection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("EventDetection") detection_method: str = Field( ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", @@ -246,7 +283,9 @@ class EventWaveform(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict) + children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -256,7 +295,9 @@ class FilteredEphys(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -266,7 +307,9 @@ class LFP(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -277,23 +320,37 @@ class ElectrodeGroup(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") + description: Optional[str] = Field( + None, description="""Description of this electrode group.""" + ) location: Optional[str] = Field( None, description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible.""", ) - position: Optional[Any] = Field( + position: Optional[str] = Field( None, description="""stereotaxic or common framework coordinates""" ) +class ElectrodeGroupPosition(ConfiguredBaseModel): + """ + stereotaxic or common framework coordinates + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["position"] = Field("position") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") + + class ClusterWaveforms(NWBDataInterface): """ DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ClusterWaveforms") waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) @@ -313,7 +370,7 @@ class Clustering(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Clustering") description: str = Field( ..., description="""Description of clusters or clustering, (e.g. 
cluster 0 is noise, clusters curated using Klusters, etc)""", @@ -343,5 +400,6 @@ EventWaveform.model_rebuild() FilteredEphys.model_rebuild() LFP.model_rebuild() ElectrodeGroup.model_rebuild() +ElectrodeGroupPosition.model_rebuild() ClusterWaveforms.model_rebuild() Clustering.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py index b4457e0..4f781fd 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_epoch.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,10 +54,14 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_2.hdmf_common_table import ( + VectorData, DynamicTable, VectorIndex, ) +from .core_nwb_base import TimeSeries + + metamodel_version = "None" version = "2.2.1" @@ -48,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return 
self.array[i] else: @@ -85,10 +117,12 @@ class TimeIntervals(DynamicTable): description="""User-defined tags that identify or categorize events.""", ) tags_index: Optional[str] = Field(None, description="""Index for tags.""") - timeseries: Optional[List[Any] | Any] = Field( - default_factory=list, description="""An index into a TimeSeries object.""" + timeseries: Optional[str] = Field( + None, description="""An index into a TimeSeries object.""" + ) + timeseries_index: Optional[str] = Field( + None, description="""Index for timeseries.""" ) - timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""") colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -122,6 +156,29 @@ class TimeIntervalsTagsIndex(VectorIndex): ) +class TimeIntervalsTimeseries(VectorData): + """ + An index into a TimeSeries object. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["timeseries"] = Field("timeseries") + idx_start: Optional[int] = Field( + None, + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", + ) + count: Optional[int] = Field( + None, + description="""Number of data samples available in this time series, during this epoch.""", + ) + timeseries: Optional[str] = Field( + None, description="""the TimeSeries that this index applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + + class TimeIntervalsTimeseriesIndex(VectorIndex): """ Index for timeseries. 
@@ -139,4 +196,5 @@ class TimeIntervalsTimeseriesIndex(VectorIndex): # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model TimeIntervals.model_rebuild() TimeIntervalsTagsIndex.model_rebuild() +TimeIntervalsTimeseries.model_rebuild() TimeIntervalsTimeseriesIndex.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py index bffbe6f..b7ec4fc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_file.py @@ -1,23 +1,49 @@ from __future__ import annotations - -import sys -from datetime import datetime +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,14 +53,28 @@ if TYPE_CHECKING: import numpy as np +from .core_nwb_misc import Units + +from .core_nwb_device import Device + +from .core_nwb_ecephys import ElectrodeGroup + +from .core_nwb_epoch import TimeIntervals + +from .core_nwb_base import NWBContainer, TimeSeries, ProcessingModule, NWBDataInterface + +from .core_nwb_ophys import ImagingPlane + +from .core_nwb_icephys import SweepTable, IntracellularElectrode + from ...hdmf_common.v1_1_2.hdmf_common_table 
import ( + VectorIndex, + VectorData, DynamicTable, ) -from .core_nwb_base import NWBContainer, NWBDataInterface, ProcessingModule, TimeSeries -from .core_nwb_device import Device -from .core_nwb_epoch import TimeIntervals + from .core_nwb_ogen import OptogeneticStimulusSite -from .core_nwb_ophys import ImagingPlane + metamodel_version = "None" version = "2.2.1" @@ -54,7 +94,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -183,7 +223,9 @@ class NWBFileGeneral(ConfiguredBaseModel): keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field( None, description="""Terms to search over.""" ) - lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""") + lab: Optional[str] = Field( + None, description="""Laboratory where experiment was performed.""" + ) notes: Optional[str] = Field(None, description="""Notes about the experiment.""") pharmacology: Optional[str] = Field( None, @@ -196,7 +238,9 @@ class NWBFileGeneral(ConfiguredBaseModel): related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field( None, description="""Publication information. 
PMID, DOI, URL, etc.""" ) - session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""") + session_id: Optional[str] = Field( + None, description="""Lab-specific ID for the session.""" + ) slices: Optional[str] = Field( None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", @@ -235,9 +279,11 @@ class NWBFileGeneral(ConfiguredBaseModel): intracellular_ephys: Optional[str] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) - optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field( - default_factory=dict, - description="""Metadata describing optogenetic stimuluation.""", + optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = ( + Field( + default_factory=dict, + description="""Metadata describing optogenetic stimuluation.""", + ) ) optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field( default_factory=dict, description="""Metadata related to optophysiology.""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py index c51503c..3371688 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_icephys.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + 
UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -25,15 +53,20 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_2.hdmf_common_table import ( - DynamicTable, - VectorIndex, -) from .core_nwb_base import ( + TimeSeriesStartingTime, NWBContainer, TimeSeries, + TimeSeriesSync, ) +from ...hdmf_common.v1_1_2.hdmf_common_table import ( + VectorIndex, + VectorData, + DynamicTable, +) + + metamodel_version = "None" version = "2.2.1" @@ -52,7 +85,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -90,7 +123,9 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -139,8 +174,12 @@ class CurrentClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
data: str = Field(..., description="""Recorded voltage.""") - bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + bias_current: Optional[float] = Field( + None, description="""Bias current, in amps.""" + ) + bridge_balance: Optional[float] = Field( + None, description="""Bridge balance, in ohms.""" + ) capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) @@ -155,7 +194,9 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -203,8 +244,12 @@ class IZeroClampSeries(CurrentClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + bias_current: float = Field( + ..., description="""Bias current, in amps, fixed to 0.0.""" + ) + bridge_balance: float = Field( + ..., description="""Bridge balance, in ohms, fixed to 0.0.""" + ) capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) @@ -220,7 +265,9 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -266,7 +313,9 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -315,8 +364,12 @@ class VoltageClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
data: str = Field(..., description="""Recorded current.""") - capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""") - capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""") + capacitance_fast: Optional[str] = Field( + None, description="""Fast capacitance, in farads.""" + ) + capacitance_slow: Optional[str] = Field( + None, description="""Slow capacitance, in farads.""" + ) resistance_comp_bandwidth: Optional[str] = Field( None, description="""Resistance compensation bandwidth, in hertz.""" ) @@ -343,7 +396,9 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -474,7 +529,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp") + name: Literal["whole_cell_series_resistance_comp"] = Field( + "whole_cell_series_resistance_comp" + ) unit: Optional[str] = Field( None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", @@ -501,7 +558,9 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -553,7 +612,9 @@ class IntracellularElectrode(NWBContainer): ..., description="""Description of electrode (e.g., whole-cell, sharp, etc.).""", ) - filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""") + filtering: Optional[str] = Field( + None, description="""Electrode specific filtering.""" + ) initial_access_resistance: Optional[str] = Field( None, description="""Initial access resistance.""" ) @@ -561,8 +622,12 @@ class IntracellularElectrode(NWBContainer): None, description="""Location of the electrode. 
Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", ) - resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""") - seal: Optional[str] = Field(None, description="""Information about seal used for recording.""") + resistance: Optional[str] = Field( + None, description="""Electrode resistance, in ohms.""" + ) + seal: Optional[str] = Field( + None, description="""Information about seal used for recording.""" + ) slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py index 95aaf78..6170445 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_image.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -25,7 +53,8 @@ if TYPE_CHECKING: import numpy as np -from .core_nwb_base import Image, TimeSeries +from .core_nwb_base import 
TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries + metamodel_version = "None" version = "2.2.1" @@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -74,7 +103,9 @@ class GrayscaleImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -94,7 +125,9 @@ class RGBImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -114,7 +147,9 @@ class RGBAImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -148,7 +183,9 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -213,7 +250,9 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -280,7 +319,9 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -317,7 +358,9 @@ class IndexSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the frame in the referenced ImageSeries.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_language.py index 4963581..f3b68d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_language.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_language.py @@ -1,20 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel): tree_root: bool = False -class Arraylike(ConfiguredBaseModel): - """ - Container for arraylike information held in the dims, shape, and dtype properties.this 
is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. - """ - - linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - - # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Arraylike.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py index ddc484a..39d5000 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_misc.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,12 +54,16 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_2.hdmf_common_table import ( + VectorData, DynamicTable, DynamicTableRegion, - VectorData, VectorIndex, ) -from .core_nwb_base import TimeSeries + +from .core_nwb_base import 
TimeSeriesStartingTime, TimeSeriesSync, TimeSeries + +from .core_nwb_ecephys import ElectrodeGroup + metamodel_version = "None" version = "2.2.1" @@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries): ..., description="""Description of the features represented in TimeSeries::data.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries): data: NDArray[Shape["* num_times"], str] = Field( ..., description="""Annotations made during an experiment.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -214,12 +251,16 @@ class DecompositionSeries(TimeSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Data decomposed into frequency bands.""") - metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""") + metric: str = Field( + ..., description="""The metric used, e.g. phase, amplitude, power.""" + ) bands: str = Field( ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -257,7 +298,9 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None) + array: Optional[ + NDArray[Shape["* num_times, * num_channels, * num_bands"], float] + ] = Field(None) class DecompositionSeriesBands(DynamicTable): @@ -306,18 +349,22 @@ class Units(DynamicTable): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Units") spike_times_index: Optional[str] = Field( None, description="""Index into the spike_times dataset.""" ) - spike_times: Optional[str] = Field(None, description="""Spike times for each unit.""") + spike_times: Optional[str] = Field( + None, description="""Spike times for each unit.""" + ) obs_intervals_index: Optional[str] = Field( None, description="""Index into the obs_intervals dataset.""" ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field( - None, description="""Observation intervals for each unit.""" + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = ( + Field(None, description="""Observation intervals for each unit.""") + ) + electrodes_index: Optional[str] = Field( + None, description="""Index into electrodes.""" ) - electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""") electrodes: Optional[str] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", @@ -337,7 +384,9 @@ class Units(DynamicTable): 
NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] - ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") + ] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py index 5018519..921daf9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ogen.py @@ -1,25 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -27,8 +56,11 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBContainer, TimeSeries, + TimeSeriesStartingTime, + TimeSeriesSync, ) + metamodel_version = "None" version = "2.2.1" @@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel): 
object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries): data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field( + ..., description="""Excitation wavelength, in nm.""" + ) location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py index 47f80a9..f118d26 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_ophys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,13 +53,18 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTable, DynamicTableRegion +from .core_nwb_image import ImageSeriesExternalFile, ImageSeries + from .core_nwb_base import ( - NWBContainer, + TimeSeriesStartingTime, + TimeSeriesSync, NWBDataInterface, TimeSeries, + NWBContainer, ) -from .core_nwb_image import ImageSeries + +from ...hdmf_common.v1_1_2.hdmf_common_table import DynamicTable, DynamicTableRegion + metamodel_version = "None" version = "2.2.1" @@ -52,7 +84,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def 
__getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -109,7 +141,9 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -151,7 +185,9 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -200,7 +236,9 @@ class DfOverF(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -210,7 +248,9 @@ class Fluorescence(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -220,9 +260,9 @@ class ImageSegmentation(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable]] = ( - Field(default_factory=dict) - ) + children: Optional[ + List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable] + ] = Field(default_factory=dict) name: str = Field(...) @@ -233,9 +273,15 @@ class ImagingPlane(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- description: Optional[str] = Field(None, description="""Description of the imaging plane.""") - excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") - imaging_rate: float = Field(..., description="""Rate that images are acquired, in Hz.""") + description: Optional[str] = Field( + None, description="""Description of the imaging plane.""" + ) + excitation_lambda: float = Field( + ..., description="""Excitation wavelength, in nm.""" + ) + imaging_rate: float = Field( + ..., description="""Rate that images are acquired, in Hz.""" + ) indicator: str = Field(..., description="""Calcium indicator.""") location: str = Field( ..., @@ -320,8 +366,12 @@ class OpticalChannel(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) name: str = Field(...) - description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") + description: str = Field( + ..., description="""Description or other notes about the channel.""" + ) + emission_lambda: float = Field( + ..., description="""Emission wavelength for channel, in nm.""" + ) class MotionCorrection(NWBDataInterface): @@ -330,7 +380,9 @@ class MotionCorrection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[NWBDataInterface] | NWBDataInterface] = Field(default_factory=dict) + children: Optional[List[NWBDataInterface] | NWBDataInterface] = Field( + default_factory=dict + ) name: str = Field(...) 
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py index e21d0d4..73ed473 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/core_nwb_retinotopy.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,8 +54,10 @@ if TYPE_CHECKING: from .core_nwb_base import NWBData, NWBDataInterface + from .core_nwb_image import GrayscaleImage + metamodel_version = "None" version = "2.2.1" @@ -46,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -76,7 +106,9 @@ class RetinotopyMap(NWBData): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) @@ -95,7 +127,9 @@ class AxisMap(RetinotopyMap): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) class RetinotopyImage(GrayscaleImage): @@ -113,14 +147,18 @@ class RetinotopyImage(GrayscaleImage): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -136,7 +174,7 @@ class ImagingRetinotopy(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("ImagingRetinotopy") axis_1_phase_map: str = Field( ..., description="""Phase response to stimulus on the first measured axis.""" ) @@ -183,7 +221,9 @@ class ImagingRetinotopyAxis1PhaseMap(AxisMap): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) class ImagingRetinotopyAxis1PowerMap(AxisMap): @@ -201,7 +241,9 @@ class ImagingRetinotopyAxis1PowerMap(AxisMap): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) class ImagingRetinotopyAxis2PhaseMap(AxisMap): @@ -219,7 +261,9 @@ class ImagingRetinotopyAxis2PhaseMap(AxisMap): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) class ImagingRetinotopyAxis2PowerMap(AxisMap): @@ -237,7 +281,9 @@ class ImagingRetinotopyAxis2PowerMap(AxisMap): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) class ImagingRetinotopySignMap(RetinotopyMap): @@ -251,7 +297,9 @@ class ImagingRetinotopySignMap(RetinotopyMap): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) @@ -262,7 +310,9 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) name: Literal["focal_depth_image"] = Field("focal_depth_image") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") + focal_depth: Optional[float] = Field( + None, description="""Focal depth offset, in meters.""" + ) bits_per_pixel: Optional[int] = Field( None, description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", @@ -271,14 +321,18 @@ class ImagingRetinotopyFocalDepthImage(RetinotopyImage): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. 
Right now only 'raw' is supported.""" ) resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -303,14 +357,18 @@ class ImagingRetinotopyVasculatureImage(RetinotopyImage): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py index a31471d..3c7ae26 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_1/namespace.py @@ -1,23 +1,220 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic 
import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np +from ...hdmf_common.v1_1_2.hdmf_common_sparse import ( + CSRMatrix, + CSRMatrixIndices, + CSRMatrixIndptr, + CSRMatrixData, +) + +from ...hdmf_common.v1_1_2.hdmf_common_table import ( + Data, + Index, + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + Container, + DynamicTable, +) + +from .core_nwb_retinotopy import ( + RetinotopyMap, + AxisMap, + RetinotopyImage, + ImagingRetinotopy, + ImagingRetinotopyAxis1PhaseMap, + ImagingRetinotopyAxis1PowerMap, + ImagingRetinotopyAxis2PhaseMap, + ImagingRetinotopyAxis2PowerMap, + ImagingRetinotopySignMap, + ImagingRetinotopyFocalDepthImage, + ImagingRetinotopyVasculatureImage, +) + +from .core_nwb_image import ( + GrayscaleImage, + RGBImage, + RGBAImage, + ImageSeries, + ImageSeriesExternalFile, + ImageMaskSeries, + OpticalSeries, + IndexSeries, +) + +from .core_nwb_base import ( + NWBData, + Image, + NWBContainer, + NWBDataInterface, + TimeSeries, + TimeSeriesData, + TimeSeriesStartingTime, + TimeSeriesSync, + ProcessingModule, + Images, +) + +from .core_nwb_ophys import ( + TwoPhotonSeries, + RoiResponseSeries, + RoiResponseSeriesRois, + DfOverF, + Fluorescence, + ImageSegmentation, + ImagingPlane, + ImagingPlaneManifold, + ImagingPlaneOriginCoords, + ImagingPlaneGridSpacing, + OpticalChannel, + MotionCorrection, +) + +from .core_nwb_device import Device + +from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite + +from .core_nwb_icephys import ( 
+ PatchClampSeries, + PatchClampSeriesData, + CurrentClampSeries, + CurrentClampSeriesData, + IZeroClampSeries, + CurrentClampStimulusSeries, + CurrentClampStimulusSeriesData, + VoltageClampSeries, + VoltageClampSeriesData, + VoltageClampSeriesCapacitanceFast, + VoltageClampSeriesCapacitanceSlow, + VoltageClampSeriesResistanceCompBandwidth, + VoltageClampSeriesResistanceCompCorrection, + VoltageClampSeriesResistanceCompPrediction, + VoltageClampSeriesWholeCellCapacitanceComp, + VoltageClampSeriesWholeCellSeriesResistanceComp, + VoltageClampStimulusSeries, + VoltageClampStimulusSeriesData, + IntracellularElectrode, + SweepTable, + SweepTableSeriesIndex, +) + +from .core_nwb_ecephys import ( + ElectricalSeries, + ElectricalSeriesElectrodes, + SpikeEventSeries, + FeatureExtraction, + FeatureExtractionElectrodes, + EventDetection, + EventWaveform, + FilteredEphys, + LFP, + ElectrodeGroup, + ElectrodeGroupPosition, + ClusterWaveforms, + Clustering, +) + +from .core_nwb_behavior import ( + SpatialSeries, + SpatialSeriesData, + BehavioralEpochs, + BehavioralEvents, + BehavioralTimeSeries, + PupilTracking, + EyeTracking, + CompassDirection, + Position, +) + +from .core_nwb_misc import ( + AbstractFeatureSeries, + AbstractFeatureSeriesData, + AnnotationSeries, + IntervalSeries, + DecompositionSeries, + DecompositionSeriesData, + DecompositionSeriesBands, + Units, + UnitsSpikeTimesIndex, + UnitsSpikeTimes, + UnitsObsIntervalsIndex, + UnitsElectrodesIndex, + UnitsElectrodes, +) + +from .core_nwb_file import ( + NWBFile, + NWBFileStimulus, + NWBFileGeneral, + NWBFileGeneralSourceScript, + Subject, + NWBFileGeneralExtracellularEphys, + NWBFileGeneralExtracellularEphysElectrodes, + NWBFileGeneralIntracellularEphys, +) + +from .core_nwb_epoch import ( + TimeIntervals, + TimeIntervalsTagsIndex, + TimeIntervalsTimeseries, + TimeIntervalsTimeseriesIndex, +) + + metamodel_version = "None" version = "2.2.1" @@ -36,7 +233,7 @@ class ConfiguredBaseModel(BaseModel): object_id: 
Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py index bbb0578..910624d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_base.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -28,6 +55,7 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable + metamodel_version = "None" version = "2.2.2" @@ -46,7 +74,7 @@ class 
ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -84,7 +112,9 @@ class Image(NWBData): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -119,7 +149,9 @@ class TimeSeries(NWBDataInterface): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -222,7 +254,7 @@ class Images(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("Images") description: Optional[str] = Field( None, description="""Description of this collection of images.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py index 611e039..a6cea06 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_behavior.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -28,10 +55,14 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBDataInterface, + TimeSeriesStartingTime, + TimeSeriesSync, TimeSeries, ) + from .core_nwb_misc import IntervalSeries + metamodel_version = "None" version = "2.2.2" @@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries): None, description="""Description defining what exactly 
'straight-ahead' means.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -136,7 +169,9 @@ class BehavioralEpochs(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict) + children: Optional[List[IntervalSeries] | IntervalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -176,7 +211,9 @@ class EyeTracking(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -186,7 +223,9 @@ class CompassDirection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -196,7 +235,9 @@ class Position(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_device.py index 85c9bf5..d0696c1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_device.py @@ -1,26 +1,61 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np from .core_nwb_base import NWBContainer + metamodel_version = "None" version = "2.2.2" @@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py index c2aa552..39cef94 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py +++ 
b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ecephys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,13 +53,17 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTableRegion +from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, DynamicTableRegion + from .core_nwb_base import ( - NWBContainer, + TimeSeriesStartingTime, NWBDataInterface, + TimeSeriesSync, TimeSeries, + NWBContainer, ) + metamodel_version = "None" version = "2.2.2" @@ -51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -90,7 +121,9 @@ class ElectricalSeries(TimeSeries): None, description="""Channel-specific conversion factor. 
Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -164,7 +197,9 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -193,14 +228,16 @@ class FeatureExtraction(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("FeatureExtraction") description: NDArray[Shape["* num_features"], str] = Field( ..., description="""Description of features (eg, ''PC1'') for each of the extracted features.""", ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( - ..., - description="""Multi-dimensional array of features extracted from each event.""", + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = ( + Field( + ..., + description="""Multi-dimensional array of features extracted from each event.""", + ) ) times: NDArray[Shape["* num_events"], float] = Field( ..., @@ -242,7 +279,7 @@ class EventDetection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("EventDetection") detection_method: str = Field( ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", @@ -262,7 +299,9 @@ class EventWaveform(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict) + children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -272,7 +311,9 @@ class FilteredEphys(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -282,7 +323,9 @@ class LFP(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -293,23 +336,37 @@ class ElectrodeGroup(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") + description: Optional[str] = Field( + None, description="""Description of this electrode group.""" + ) location: Optional[str] = Field( None, description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible.""", ) - position: Optional[Any] = Field( + position: Optional[str] = Field( None, description="""stereotaxic or common framework coordinates""" ) +class ElectrodeGroupPosition(ConfiguredBaseModel): + """ + stereotaxic or common framework coordinates + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["position"] = Field("position") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") + + class ClusterWaveforms(NWBDataInterface): """ DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ClusterWaveforms") waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) @@ -329,7 +386,7 @@ class Clustering(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Clustering") description: str = Field( ..., description="""Description of clusters or clustering, (e.g. 
cluster 0 is noise, clusters curated using Klusters, etc)""", @@ -359,5 +416,6 @@ EventWaveform.model_rebuild() FilteredEphys.model_rebuild() LFP.model_rebuild() ElectrodeGroup.model_rebuild() +ElectrodeGroupPosition.model_rebuild() ClusterWaveforms.model_rebuild() Clustering.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py index a269c7e..0cd3e67 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_epoch.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,10 +54,14 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_3.hdmf_common_table import ( + VectorData, DynamicTable, VectorIndex, ) +from .core_nwb_base import TimeSeries + + metamodel_version = "None" version = "2.2.2" @@ -48,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return 
self.array[i] else: @@ -85,10 +117,12 @@ class TimeIntervals(DynamicTable): description="""User-defined tags that identify or categorize events.""", ) tags_index: Optional[str] = Field(None, description="""Index for tags.""") - timeseries: Optional[List[Any] | Any] = Field( - default_factory=list, description="""An index into a TimeSeries object.""" + timeseries: Optional[str] = Field( + None, description="""An index into a TimeSeries object.""" + ) + timeseries_index: Optional[str] = Field( + None, description="""Index for timeseries.""" ) - timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""") colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -123,6 +157,37 @@ class TimeIntervalsTagsIndex(VectorIndex): array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(None) +class TimeIntervalsTimeseries(VectorData): + """ + An index into a TimeSeries object. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["timeseries"] = Field("timeseries") + idx_start: Optional[int] = Field( + None, + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. 
The first dimension of those arrays is always time.""", + ) + count: Optional[int] = Field( + None, + description="""Number of data samples available in this time series, during this epoch.""", + ) + timeseries: Optional[str] = Field( + None, description="""the TimeSeries that this index applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class TimeIntervalsTimeseriesIndex(VectorIndex): """ Index for timeseries. @@ -141,4 +206,5 @@ class TimeIntervalsTimeseriesIndex(VectorIndex): # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model TimeIntervals.model_rebuild() TimeIntervalsTagsIndex.model_rebuild() +TimeIntervalsTimeseries.model_rebuild() TimeIntervalsTimeseriesIndex.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py index f176c75..f8fd528 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_file.py @@ -1,23 +1,49 @@ from __future__ import annotations - -import sys -from datetime import datetime +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + 
Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,14 +53,28 @@ if TYPE_CHECKING: import numpy as np +from .core_nwb_misc import Units + +from .core_nwb_device import Device + +from .core_nwb_ecephys import ElectrodeGroup + +from .core_nwb_epoch import TimeIntervals + +from .core_nwb_base import NWBContainer, TimeSeries, ProcessingModule, NWBDataInterface + +from .core_nwb_ophys import ImagingPlane + +from .core_nwb_icephys import SweepTable, IntracellularElectrode + from ...hdmf_common.v1_1_3.hdmf_common_table import ( + VectorIndex, + VectorData, DynamicTable, ) -from .core_nwb_base import NWBContainer, NWBDataInterface, ProcessingModule, TimeSeries -from .core_nwb_device import Device -from .core_nwb_epoch import TimeIntervals + from .core_nwb_ogen import OptogeneticStimulusSite -from .core_nwb_ophys import ImagingPlane + metamodel_version = "None" version = "2.2.2" @@ -54,7 +94,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -183,7 +223,9 @@ class NWBFileGeneral(ConfiguredBaseModel): keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field( None, description="""Terms to search over.""" ) - lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""") + lab: Optional[str] = Field( + None, description="""Laboratory where experiment was performed.""" + ) notes: Optional[str] = Field(None, description="""Notes about the experiment.""") pharmacology: Optional[str] = Field( None, @@ -196,7 +238,9 @@ class NWBFileGeneral(ConfiguredBaseModel): related_publications: 
Optional[NDArray[Shape["* num_publications"], str]] = Field( None, description="""Publication information. PMID, DOI, URL, etc.""" ) - session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""") + session_id: Optional[str] = Field( + None, description="""Lab-specific ID for the session.""" + ) slices: Optional[str] = Field( None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", @@ -235,9 +279,11 @@ class NWBFileGeneral(ConfiguredBaseModel): intracellular_ephys: Optional[str] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) - optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field( - default_factory=dict, - description="""Metadata describing optogenetic stimuluation.""", + optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = ( + Field( + default_factory=dict, + description="""Metadata describing optogenetic stimuluation.""", + ) ) optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field( default_factory=dict, description="""Metadata related to optophysiology.""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py index 33aff04..f671534 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_icephys.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + 
Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -25,15 +53,20 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_3.hdmf_common_table import ( - DynamicTable, - VectorIndex, -) from .core_nwb_base import ( + TimeSeriesStartingTime, NWBContainer, TimeSeries, + TimeSeriesSync, ) +from ...hdmf_common.v1_1_3.hdmf_common_table import ( + VectorIndex, + VectorData, + DynamicTable, +) + + metamodel_version = "None" version = "2.2.2" @@ -52,7 +85,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -90,7 +123,9 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -139,8 +174,12 @@ class CurrentClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
data: str = Field(..., description="""Recorded voltage.""") - bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + bias_current: Optional[float] = Field( + None, description="""Bias current, in amps.""" + ) + bridge_balance: Optional[float] = Field( + None, description="""Bridge balance, in ohms.""" + ) capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) @@ -155,7 +194,9 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -203,8 +244,12 @@ class IZeroClampSeries(CurrentClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + bias_current: float = Field( + ..., description="""Bias current, in amps, fixed to 0.0.""" + ) + bridge_balance: float = Field( + ..., description="""Bridge balance, in ohms, fixed to 0.0.""" + ) capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) @@ -220,7 +265,9 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -266,7 +313,9 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -315,8 +364,12 @@ class VoltageClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
data: str = Field(..., description="""Recorded current.""") - capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""") - capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""") + capacitance_fast: Optional[str] = Field( + None, description="""Fast capacitance, in farads.""" + ) + capacitance_slow: Optional[str] = Field( + None, description="""Slow capacitance, in farads.""" + ) resistance_comp_bandwidth: Optional[str] = Field( None, description="""Resistance compensation bandwidth, in hertz.""" ) @@ -343,7 +396,9 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -474,7 +529,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp") + name: Literal["whole_cell_series_resistance_comp"] = Field( + "whole_cell_series_resistance_comp" + ) unit: Optional[str] = Field( None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", @@ -501,7 +558,9 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -553,7 +612,9 @@ class IntracellularElectrode(NWBContainer): ..., description="""Description of electrode (e.g., whole-cell, sharp, etc.).""", ) - filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""") + filtering: Optional[str] = Field( + None, description="""Electrode specific filtering.""" + ) initial_access_resistance: Optional[str] = Field( None, description="""Initial access resistance.""" ) @@ -561,8 +622,12 @@ class IntracellularElectrode(NWBContainer): None, description="""Location of the electrode. 
Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", ) - resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""") - seal: Optional[str] = Field(None, description="""Information about seal used for recording.""") + resistance: Optional[str] = Field( + None, description="""Electrode resistance, in ohms.""" + ) + seal: Optional[str] = Field( + None, description="""Information about seal used for recording.""" + ) slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py index 8488838..5a7a945 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_image.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -25,7 +53,8 @@ if TYPE_CHECKING: import numpy as np -from .core_nwb_base import Image, TimeSeries +from .core_nwb_base import 
TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries + metamodel_version = "None" version = "2.2.2" @@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -74,7 +103,9 @@ class GrayscaleImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -94,7 +125,9 @@ class RGBImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -114,7 +147,9 @@ class RGBAImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -148,7 +183,9 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -213,7 +250,9 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -262,7 +301,9 @@ class OpticalSeries(ImageSeries): data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, 3 r, g, b"], float], - ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") + ] = Field( + ..., description="""Images presented to subject, either grayscale or RGB""" + ) orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). 
Must also specify frame of reference.""", @@ -278,7 +319,9 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -315,7 +358,9 @@ class IndexSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the frame in the referenced ImageSeries.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_language.py index 4963581..f3b68d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_language.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_language.py @@ -1,20 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel): tree_root: bool = False -class Arraylike(ConfiguredBaseModel): - """ - Container for arraylike information held in the dims, shape, and dtype properties.this 
is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. - """ - - linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - - # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Arraylike.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py index 6f4218d..3a977e4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_misc.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,12 +54,16 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_3.hdmf_common_table import ( + VectorData, DynamicTable, DynamicTableRegion, - VectorData, VectorIndex, ) -from .core_nwb_base import TimeSeries + +from .core_nwb_base import 
TimeSeriesStartingTime, TimeSeriesSync, TimeSeries + +from .core_nwb_ecephys import ElectrodeGroup + metamodel_version = "None" version = "2.2.2" @@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries): ..., description="""Description of the features represented in TimeSeries::data.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries): data: NDArray[Shape["* num_times"], str] = Field( ..., description="""Annotations made during an experiment.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -214,12 +251,16 @@ class DecompositionSeries(TimeSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Data decomposed into frequency bands.""") - metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""") + metric: str = Field( + ..., description="""The metric used, e.g. phase, amplitude, power.""" + ) bands: str = Field( ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -257,7 +298,9 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None) + array: Optional[ + NDArray[Shape["* num_times, * num_channels, * num_bands"], float] + ] = Field(None) class DecompositionSeriesBands(DynamicTable): @@ -306,18 +349,22 @@ class Units(DynamicTable): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Units") spike_times_index: Optional[str] = Field( None, description="""Index into the spike_times dataset.""" ) - spike_times: Optional[str] = Field(None, description="""Spike times for each unit.""") + spike_times: Optional[str] = Field( + None, description="""Spike times for each unit.""" + ) obs_intervals_index: Optional[str] = Field( None, description="""Index into the obs_intervals dataset.""" ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field( - None, description="""Observation intervals for each unit.""" + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = ( + Field(None, description="""Observation intervals for each unit.""") + ) + electrodes_index: Optional[str] = Field( + None, description="""Index into electrodes.""" ) - electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""") electrodes: Optional[str] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", @@ -337,7 +384,9 @@ class Units(DynamicTable): 
NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] - ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") + ] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py index f0210aa..7632919 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ogen.py @@ -1,25 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -27,8 +56,11 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBContainer, TimeSeries, + TimeSeriesStartingTime, + TimeSeriesSync, ) + metamodel_version = "None" version = "2.2.2" @@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel): 
object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries): data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field( + ..., description="""Excitation wavelength, in nm.""" + ) location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py index cdd5722..b62056c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_ophys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,13 +53,18 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, DynamicTableRegion +from .core_nwb_image import ImageSeriesExternalFile, ImageSeries + from .core_nwb_base import ( - NWBContainer, + TimeSeriesStartingTime, + TimeSeriesSync, NWBDataInterface, TimeSeries, + NWBContainer, ) -from .core_nwb_image import ImageSeries + +from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, DynamicTableRegion + metamodel_version = "None" version = "2.2.2" @@ -52,7 +84,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def 
__getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -109,7 +141,9 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -151,7 +185,9 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -208,7 +244,9 @@ class DfOverF(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -218,7 +256,9 @@ class Fluorescence(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -228,9 +268,9 @@ class ImageSegmentation(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable]] = ( - Field(default_factory=dict) - ) + children: Optional[ + List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable] + ] = Field(default_factory=dict) name: str = Field(...) @@ -250,7 +290,9 @@ class MotionCorrection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[NWBDataInterface] | NWBDataInterface] = Field(default_factory=dict) + children: Optional[List[NWBDataInterface] | NWBDataInterface] = Field( + default_factory=dict + ) name: str = Field(...) 
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py index 2cac50c..501641f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/core_nwb_retinotopy.py @@ -1,20 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,6 +55,7 @@ if TYPE_CHECKING: from .core_nwb_base import NWBDataInterface + metamodel_version = "None" version = "2.2.2" @@ -44,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -69,7 +99,7 @@ class ImagingRetinotopy(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("ImagingRetinotopy") axis_1_phase_map: str = Field( ..., description="""Phase response to stimulus on the first measured axis.""" ) @@ -113,7 +143,9 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -131,7 +163,9 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -149,7 +183,9 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -167,7 +203,9 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -189,8 +227,12 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) + focal_depth: Optional[float] = Field( + None, description="""Focal depth offset, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) @@ -208,7 +250,9 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) @@ -227,7 +271,9 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py index 93ecb32..41ea4b2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_2/namespace.py @@ -1,23 +1,213 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np +from ...hdmf_common.v1_1_3.hdmf_common_sparse import ( + CSRMatrix, + CSRMatrixIndices, + CSRMatrixIndptr, + CSRMatrixData, +) + +from ...hdmf_common.v1_1_3.hdmf_common_table import ( + Data, + Index, + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + Container, + DynamicTable, +) + +from .core_nwb_retinotopy import ( + 
ImagingRetinotopy, + ImagingRetinotopyAxis1PhaseMap, + ImagingRetinotopyAxis1PowerMap, + ImagingRetinotopyAxis2PhaseMap, + ImagingRetinotopyAxis2PowerMap, + ImagingRetinotopyFocalDepthImage, + ImagingRetinotopySignMap, + ImagingRetinotopyVasculatureImage, +) + +from .core_nwb_base import ( + NWBData, + Image, + NWBContainer, + NWBDataInterface, + TimeSeries, + TimeSeriesData, + TimeSeriesStartingTime, + TimeSeriesSync, + ProcessingModule, + Images, +) + +from .core_nwb_ophys import ( + TwoPhotonSeries, + RoiResponseSeries, + RoiResponseSeriesRois, + DfOverF, + Fluorescence, + ImageSegmentation, + ImagingPlane, + MotionCorrection, +) + +from .core_nwb_device import Device + +from .core_nwb_image import ( + GrayscaleImage, + RGBImage, + RGBAImage, + ImageSeries, + ImageSeriesExternalFile, + ImageMaskSeries, + OpticalSeries, + IndexSeries, +) + +from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite + +from .core_nwb_icephys import ( + PatchClampSeries, + PatchClampSeriesData, + CurrentClampSeries, + CurrentClampSeriesData, + IZeroClampSeries, + CurrentClampStimulusSeries, + CurrentClampStimulusSeriesData, + VoltageClampSeries, + VoltageClampSeriesData, + VoltageClampSeriesCapacitanceFast, + VoltageClampSeriesCapacitanceSlow, + VoltageClampSeriesResistanceCompBandwidth, + VoltageClampSeriesResistanceCompCorrection, + VoltageClampSeriesResistanceCompPrediction, + VoltageClampSeriesWholeCellCapacitanceComp, + VoltageClampSeriesWholeCellSeriesResistanceComp, + VoltageClampStimulusSeries, + VoltageClampStimulusSeriesData, + IntracellularElectrode, + SweepTable, + SweepTableSeriesIndex, +) + +from .core_nwb_ecephys import ( + ElectricalSeries, + ElectricalSeriesElectrodes, + SpikeEventSeries, + FeatureExtraction, + FeatureExtractionElectrodes, + EventDetection, + EventWaveform, + FilteredEphys, + LFP, + ElectrodeGroup, + ElectrodeGroupPosition, + ClusterWaveforms, + Clustering, +) + +from .core_nwb_behavior import ( + SpatialSeries, + SpatialSeriesData, + 
BehavioralEpochs, + BehavioralEvents, + BehavioralTimeSeries, + PupilTracking, + EyeTracking, + CompassDirection, + Position, +) + +from .core_nwb_misc import ( + AbstractFeatureSeries, + AbstractFeatureSeriesData, + AnnotationSeries, + IntervalSeries, + DecompositionSeries, + DecompositionSeriesData, + DecompositionSeriesBands, + Units, + UnitsSpikeTimesIndex, + UnitsSpikeTimes, + UnitsObsIntervalsIndex, + UnitsElectrodesIndex, + UnitsElectrodes, +) + +from .core_nwb_file import ( + NWBFile, + NWBFileStimulus, + NWBFileGeneral, + NWBFileGeneralSourceScript, + Subject, + NWBFileGeneralExtracellularEphys, + NWBFileGeneralExtracellularEphysElectrodes, + NWBFileGeneralIntracellularEphys, +) + +from .core_nwb_epoch import ( + TimeIntervals, + TimeIntervalsTagsIndex, + TimeIntervalsTimeseries, + TimeIntervalsTimeseriesIndex, +) + + metamodel_version = "None" version = "2.2.2" @@ -36,7 +226,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py index c2f72a3..936a875 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_base.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date 
+from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -28,6 +55,7 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable + metamodel_version = "None" version = "2.2.4" @@ -46,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -84,7 +112,9 @@ class Image(NWBData): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -119,7 +149,9 @@ class TimeSeries(NWBDataInterface): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -222,7 +254,7 @@ class Images(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Images") description: Optional[str] = Field( None, description="""Description of this collection of images.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py index 3d23f95..bdee632 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_behavior.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -28,10 
+55,14 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBDataInterface, + TimeSeriesStartingTime, + TimeSeriesSync, TimeSeries, ) + from .core_nwb_misc import IntervalSeries + metamodel_version = "None" version = "2.2.4" @@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries): None, description="""Description defining what exactly 'straight-ahead' means.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -136,7 +169,9 @@ class BehavioralEpochs(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict) + children: Optional[List[IntervalSeries] | IntervalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -176,7 +211,9 @@ class EyeTracking(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
@@ -186,7 +223,9 @@ class CompassDirection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -196,7 +235,9 @@ class Position(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_device.py index 8354a73..4105070 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_device.py @@ -1,26 +1,61 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np 
from .core_nwb_base import NWBContainer + metamodel_version = "None" version = "2.2.4" @@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py index 7ed4a70..ac10dec 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ecephys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,13 +53,17 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTableRegion +from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, DynamicTableRegion + from .core_nwb_base import ( - NWBContainer, + TimeSeriesStartingTime, NWBDataInterface, + TimeSeriesSync, TimeSeries, + NWBContainer, ) + metamodel_version = "None" version = "2.2.4" @@ 
-51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -90,7 +121,9 @@ class ElectricalSeries(TimeSeries): None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -164,7 +197,9 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. 
This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -193,14 +228,16 @@ class FeatureExtraction(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("FeatureExtraction") description: NDArray[Shape["* num_features"], str] = Field( ..., description="""Description of features (eg, ''PC1'') for each of the extracted features.""", ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( - ..., - description="""Multi-dimensional array of features extracted from each event.""", + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = ( + Field( + ..., + description="""Multi-dimensional array of features extracted from each event.""", + ) ) times: NDArray[Shape["* num_events"], float] = Field( ..., @@ -242,7 +279,7 @@ class EventDetection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("EventDetection") detection_method: str = Field( ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", @@ -262,7 +299,9 @@ class EventWaveform(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict) + children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -272,7 +311,9 @@ class FilteredEphys(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -282,7 +323,9 @@ class LFP(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -293,23 +336,37 @@ class ElectrodeGroup(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") + description: Optional[str] = Field( + None, description="""Description of this electrode group.""" + ) location: Optional[str] = Field( None, description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible.""", ) - position: Optional[Any] = Field( + position: Optional[str] = Field( None, description="""stereotaxic or common framework coordinates""" ) +class ElectrodeGroupPosition(ConfiguredBaseModel): + """ + stereotaxic or common framework coordinates + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["position"] = Field("position") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") + + class ClusterWaveforms(NWBDataInterface): """ DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ClusterWaveforms") waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) @@ -329,7 +386,7 @@ class Clustering(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Clustering") description: str = Field( ..., description="""Description of clusters or clustering, (e.g. 
cluster 0 is noise, clusters curated using Klusters, etc)""", @@ -359,5 +416,6 @@ EventWaveform.model_rebuild() FilteredEphys.model_rebuild() LFP.model_rebuild() ElectrodeGroup.model_rebuild() +ElectrodeGroupPosition.model_rebuild() ClusterWaveforms.model_rebuild() Clustering.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py index 6b64662..4166313 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_epoch.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,10 +54,14 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_3.hdmf_common_table import ( + VectorData, DynamicTable, VectorIndex, ) +from .core_nwb_base import TimeSeries + + metamodel_version = "None" version = "2.2.4" @@ -48,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return 
self.array[i] else: @@ -85,10 +117,12 @@ class TimeIntervals(DynamicTable): description="""User-defined tags that identify or categorize events.""", ) tags_index: Optional[str] = Field(None, description="""Index for tags.""") - timeseries: Optional[List[Any] | Any] = Field( - default_factory=list, description="""An index into a TimeSeries object.""" + timeseries: Optional[str] = Field( + None, description="""An index into a TimeSeries object.""" + ) + timeseries_index: Optional[str] = Field( + None, description="""Index for timeseries.""" ) - timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""") colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -123,6 +157,37 @@ class TimeIntervalsTagsIndex(VectorIndex): array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(None) +class TimeIntervalsTimeseries(VectorData): + """ + An index into a TimeSeries object. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["timeseries"] = Field("timeseries") + idx_start: Optional[int] = Field( + None, + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. 
The first dimension of those arrays is always time.""", + ) + count: Optional[int] = Field( + None, + description="""Number of data samples available in this time series, during this epoch.""", + ) + timeseries: Optional[str] = Field( + None, description="""the TimeSeries that this index applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class TimeIntervalsTimeseriesIndex(VectorIndex): """ Index for timeseries. @@ -141,4 +206,5 @@ class TimeIntervalsTimeseriesIndex(VectorIndex): # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model TimeIntervals.model_rebuild() TimeIntervalsTagsIndex.model_rebuild() +TimeIntervalsTimeseries.model_rebuild() TimeIntervalsTimeseriesIndex.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py index 1fdbdd0..7ada071 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_file.py @@ -1,23 +1,49 @@ from __future__ import annotations - -import sys -from datetime import datetime +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + 
Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,21 +53,35 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_3.hdmf_common_table import ( - DynamicTable, -) +from .core_nwb_misc import Units + +from .core_nwb_device import Device + +from .core_nwb_ecephys import ElectrodeGroup + +from .core_nwb_epoch import TimeIntervals + from .core_nwb_base import ( NWBContainer, NWBData, - NWBDataInterface, - ProcessingModule, TimeSeries, + ProcessingModule, + NWBDataInterface, ) -from .core_nwb_device import Device -from .core_nwb_epoch import TimeIntervals -from .core_nwb_ogen import OptogeneticStimulusSite + from .core_nwb_ophys import ImagingPlane +from .core_nwb_icephys import SweepTable, IntracellularElectrode + +from ...hdmf_common.v1_1_3.hdmf_common_table import ( + VectorIndex, + VectorData, + DynamicTable, +) + +from .core_nwb_ogen import OptogeneticStimulusSite + + metamodel_version = "None" version = "2.2.4" @@ -60,7 +100,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -201,7 +241,9 @@ class NWBFileGeneral(ConfiguredBaseModel): keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field( None, description="""Terms to search over.""" ) - lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""") + lab: Optional[str] = Field( + None, description="""Laboratory where experiment was performed.""" + ) notes: Optional[str] = Field(None, description="""Notes about the experiment.""") pharmacology: Optional[str] = Field( None, @@ -214,7 +256,9 @@ class 
NWBFileGeneral(ConfiguredBaseModel): related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field( None, description="""Publication information. PMID, DOI, URL, etc.""" ) - session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""") + session_id: Optional[str] = Field( + None, description="""Lab-specific ID for the session.""" + ) slices: Optional[str] = Field( None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", @@ -253,9 +297,11 @@ class NWBFileGeneral(ConfiguredBaseModel): intracellular_ephys: Optional[str] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) - optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field( - default_factory=dict, - description="""Metadata describing optogenetic stimuluation.""", + optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = ( + Field( + default_factory=dict, + description="""Metadata describing optogenetic stimuluation.""", + ) ) optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field( default_factory=dict, description="""Metadata related to optophysiology.""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py index e1d97dd..eff8f65 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_icephys.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, 
BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -25,15 +53,20 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_3.hdmf_common_table import ( - DynamicTable, - VectorIndex, -) from .core_nwb_base import ( + TimeSeriesStartingTime, NWBContainer, TimeSeries, + TimeSeriesSync, ) +from ...hdmf_common.v1_1_3.hdmf_common_table import ( + VectorIndex, + VectorData, + DynamicTable, +) + + metamodel_version = "None" version = "2.2.4" @@ -52,7 +85,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -90,7 +123,9 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -139,8 +174,12 @@ class CurrentClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
data: str = Field(..., description="""Recorded voltage.""") - bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + bias_current: Optional[float] = Field( + None, description="""Bias current, in amps.""" + ) + bridge_balance: Optional[float] = Field( + None, description="""Bridge balance, in ohms.""" + ) capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) @@ -155,7 +194,9 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -203,8 +244,12 @@ class IZeroClampSeries(CurrentClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + bias_current: float = Field( + ..., description="""Bias current, in amps, fixed to 0.0.""" + ) + bridge_balance: float = Field( + ..., description="""Bridge balance, in ohms, fixed to 0.0.""" + ) capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) @@ -220,7 +265,9 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -266,7 +313,9 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -315,8 +364,12 @@ class VoltageClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
data: str = Field(..., description="""Recorded current.""") - capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""") - capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""") + capacitance_fast: Optional[str] = Field( + None, description="""Fast capacitance, in farads.""" + ) + capacitance_slow: Optional[str] = Field( + None, description="""Slow capacitance, in farads.""" + ) resistance_comp_bandwidth: Optional[str] = Field( None, description="""Resistance compensation bandwidth, in hertz.""" ) @@ -343,7 +396,9 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -474,7 +529,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp") + name: Literal["whole_cell_series_resistance_comp"] = Field( + "whole_cell_series_resistance_comp" + ) unit: Optional[str] = Field( None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", @@ -501,7 +558,9 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -553,7 +612,9 @@ class IntracellularElectrode(NWBContainer): ..., description="""Description of electrode (e.g., whole-cell, sharp, etc.).""", ) - filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""") + filtering: Optional[str] = Field( + None, description="""Electrode specific filtering.""" + ) initial_access_resistance: Optional[str] = Field( None, description="""Initial access resistance.""" ) @@ -561,8 +622,12 @@ class IntracellularElectrode(NWBContainer): None, description="""Location of the electrode. 
Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", ) - resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""") - seal: Optional[str] = Field(None, description="""Information about seal used for recording.""") + resistance: Optional[str] = Field( + None, description="""Electrode resistance, in ohms.""" + ) + seal: Optional[str] = Field( + None, description="""Information about seal used for recording.""" + ) slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py index 58ceb08..1831f88 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_image.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -25,7 +53,8 @@ if TYPE_CHECKING: import numpy as np -from .core_nwb_base import Image, TimeSeries +from .core_nwb_base import 
TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries + metamodel_version = "None" version = "2.2.4" @@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -74,7 +103,9 @@ class GrayscaleImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -94,7 +125,9 @@ class RGBImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -114,7 +147,9 @@ class RGBAImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -148,7 +183,9 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -213,7 +250,9 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -262,7 +301,9 @@ class OpticalSeries(ImageSeries): data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, 3 r, g, b"], float], - ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") + ] = Field( + ..., description="""Images presented to subject, either grayscale or RGB""" + ) orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). 
Must also specify frame of reference.""", @@ -278,7 +319,9 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -315,7 +358,9 @@ class IndexSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the frame in the referenced ImageSeries.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_language.py index 4963581..f3b68d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_language.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_language.py @@ -1,20 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel): tree_root: bool = False -class Arraylike(ConfiguredBaseModel): - """ - Container for arraylike information held in the dims, shape, and dtype properties.this 
is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. - """ - - linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - - # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Arraylike.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py index 513a04e..183a762 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_misc.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,12 +54,16 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_3.hdmf_common_table import ( + VectorData, DynamicTable, DynamicTableRegion, - VectorData, VectorIndex, ) -from .core_nwb_base import TimeSeries + +from .core_nwb_base import 
TimeSeriesStartingTime, TimeSeriesSync, TimeSeries + +from .core_nwb_ecephys import ElectrodeGroup + metamodel_version = "None" version = "2.2.4" @@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries): ..., description="""Description of the features represented in TimeSeries::data.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries): data: NDArray[Shape["* num_times"], str] = Field( ..., description="""Annotations made during an experiment.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -214,12 +251,16 @@ class DecompositionSeries(TimeSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Data decomposed into frequency bands.""") - metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""") + metric: str = Field( + ..., description="""The metric used, e.g. phase, amplitude, power.""" + ) bands: str = Field( ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -257,7 +298,9 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None) + array: Optional[ + NDArray[Shape["* num_times, * num_channels, * num_bands"], float] + ] = Field(None) class DecompositionSeriesBands(DynamicTable): @@ -306,18 +349,22 @@ class Units(DynamicTable): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Units") spike_times_index: Optional[str] = Field( None, description="""Index into the spike_times dataset.""" ) - spike_times: Optional[str] = Field(None, description="""Spike times for each unit.""") + spike_times: Optional[str] = Field( + None, description="""Spike times for each unit.""" + ) obs_intervals_index: Optional[str] = Field( None, description="""Index into the obs_intervals dataset.""" ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field( - None, description="""Observation intervals for each unit.""" + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = ( + Field(None, description="""Observation intervals for each unit.""") + ) + electrodes_index: Optional[str] = Field( + None, description="""Index into electrodes.""" ) - electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""") electrodes: Optional[str] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", @@ -337,7 +384,9 @@ class Units(DynamicTable): 
NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] - ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") + ] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py index 5ebd4df..ab92e42 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ogen.py @@ -1,25 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -27,8 +56,11 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBContainer, TimeSeries, + TimeSeriesStartingTime, + TimeSeriesSync, ) + metamodel_version = "None" version = "2.2.4" @@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel): 
object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries): data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field( + ..., description="""Excitation wavelength, in nm.""" + ) location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py index 19c2dd7..053e108 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_ophys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,17 +53,23 @@ if TYPE_CHECKING: import numpy as np +from .core_nwb_image import ImageSeriesExternalFile, ImageSeries + from ...hdmf_common.v1_1_3.hdmf_common_table import ( - DynamicTable, DynamicTableRegion, + VectorData, VectorIndex, + DynamicTable, ) + from .core_nwb_base import ( + TimeSeriesStartingTime, NWBContainer, - NWBDataInterface, TimeSeries, + NWBDataInterface, + TimeSeriesSync, ) -from .core_nwb_image import ImageSeries + metamodel_version = "None" version = "2.2.4" @@ -56,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> 
"np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -113,7 +146,9 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -155,7 +190,9 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -212,7 +249,9 @@ class DfOverF(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
@@ -222,7 +261,9 @@ class Fluorescence(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -232,7 +273,9 @@ class ImageSegmentation(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field(default_factory=dict) + children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field( + default_factory=dict + ) name: str = Field(...) @@ -252,14 +295,18 @@ class PlaneSegmentation(DynamicTable): None, description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""", ) - pixel_mask_index: Optional[str] = Field(None, description="""Index into pixel_mask.""") - pixel_mask: Optional[List[Any] | Any] = Field( - default_factory=list, + pixel_mask_index: Optional[str] = Field( + None, description="""Index into pixel_mask.""" + ) + pixel_mask: Optional[str] = Field( + None, description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) - voxel_mask_index: Optional[str] = Field(None, description="""Index into voxel_mask.""") - voxel_mask: Optional[List[Any] | Any] = Field( - default_factory=list, + voxel_mask_index: Optional[str] = Field( + None, description="""Index into voxel_mask.""" + ) + voxel_mask: Optional[str] = Field( + None, description="""Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) reference_images: Optional[List[ImageSeries] | ImageSeries] = Field( @@ -300,6 +347,29 @@ class PlaneSegmentationPixelMaskIndex(VectorIndex): array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(None) +class PlaneSegmentationPixelMask(VectorData): + """ + Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["pixel_mask"] = Field("pixel_mask") + x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") + y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the pixel.""") + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class PlaneSegmentationVoxelMaskIndex(VectorIndex): """ Index into voxel_mask. @@ -314,13 +384,39 @@ class PlaneSegmentationVoxelMaskIndex(VectorIndex): array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(None) +class PlaneSegmentationVoxelMask(VectorData): + """ + Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["voxel_mask"] = Field("voxel_mask") + x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") + y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") + z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the voxel.""") + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class ImagingPlane(NWBContainer): """ An imaging plane and its metadata. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[OpticalChannel] | OpticalChannel] = Field(default_factory=dict) + children: Optional[List[OpticalChannel] | OpticalChannel] = Field( + default_factory=dict + ) name: str = Field(...) @@ -331,8 +427,12 @@ class OpticalChannel(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") + description: str = Field( + ..., description="""Description or other notes about the channel.""" + ) + emission_lambda: float = Field( + ..., description="""Emission wavelength for channel, in nm.""" + ) class MotionCorrection(NWBDataInterface): @@ -374,7 +474,9 @@ Fluorescence.model_rebuild() ImageSegmentation.model_rebuild() PlaneSegmentation.model_rebuild() PlaneSegmentationPixelMaskIndex.model_rebuild() +PlaneSegmentationPixelMask.model_rebuild() PlaneSegmentationVoxelMaskIndex.model_rebuild() +PlaneSegmentationVoxelMask.model_rebuild() ImagingPlane.model_rebuild() OpticalChannel.model_rebuild() MotionCorrection.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py index 124f48e..7b4a04e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/core_nwb_retinotopy.py @@ -1,20 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if 
sys.version_info >= (3, 8): from typing import Literal @@ -26,6 +55,7 @@ if TYPE_CHECKING: from .core_nwb_base import NWBDataInterface + metamodel_version = "None" version = "2.2.4" @@ -44,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -69,7 +99,7 @@ class ImagingRetinotopy(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ImagingRetinotopy") axis_1_phase_map: str = Field( ..., description="""Phase response to stimulus on the first measured axis.""" ) @@ -113,7 +143,9 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -131,7 +163,9 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -149,7 +183,9 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -167,7 +203,9 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -189,8 +227,12 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) + focal_depth: Optional[float] = Field( + None, description="""Focal depth offset, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) @@ -208,7 +250,9 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) @@ -227,7 +271,9 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py index fb15626..63ef3bc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_4/namespace.py @@ -1,23 +1,222 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if 
sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np +from ...hdmf_common.v1_1_3.hdmf_common_sparse import ( + CSRMatrix, + CSRMatrixIndices, + CSRMatrixIndptr, + CSRMatrixData, +) + +from ...hdmf_common.v1_1_3.hdmf_common_table import ( + Data, + Index, + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + Container, + DynamicTable, +) + +from .core_nwb_retinotopy import ( + ImagingRetinotopy, + ImagingRetinotopyAxis1PhaseMap, + ImagingRetinotopyAxis1PowerMap, + ImagingRetinotopyAxis2PhaseMap, + ImagingRetinotopyAxis2PowerMap, + ImagingRetinotopyFocalDepthImage, + ImagingRetinotopySignMap, + ImagingRetinotopyVasculatureImage, +) + +from .core_nwb_base import ( + NWBData, + Image, + NWBContainer, + NWBDataInterface, + TimeSeries, + TimeSeriesData, + TimeSeriesStartingTime, + TimeSeriesSync, + ProcessingModule, + Images, +) + +from .core_nwb_ophys import ( + TwoPhotonSeries, + RoiResponseSeries, + RoiResponseSeriesRois, + DfOverF, + Fluorescence, + ImageSegmentation, + PlaneSegmentation, + PlaneSegmentationPixelMaskIndex, + PlaneSegmentationPixelMask, + PlaneSegmentationVoxelMaskIndex, + PlaneSegmentationVoxelMask, + ImagingPlane, + OpticalChannel, + MotionCorrection, + CorrectedImageStack, +) + +from .core_nwb_device import Device + +from .core_nwb_image import ( + GrayscaleImage, + RGBImage, + RGBAImage, + ImageSeries, + ImageSeriesExternalFile, + ImageMaskSeries, + OpticalSeries, + IndexSeries, +) + +from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite + +from .core_nwb_icephys import ( + PatchClampSeries, + PatchClampSeriesData, + CurrentClampSeries, + CurrentClampSeriesData, + IZeroClampSeries, + CurrentClampStimulusSeries, + CurrentClampStimulusSeriesData, + VoltageClampSeries, + VoltageClampSeriesData, + VoltageClampSeriesCapacitanceFast, + VoltageClampSeriesCapacitanceSlow, + VoltageClampSeriesResistanceCompBandwidth, + 
VoltageClampSeriesResistanceCompCorrection, + VoltageClampSeriesResistanceCompPrediction, + VoltageClampSeriesWholeCellCapacitanceComp, + VoltageClampSeriesWholeCellSeriesResistanceComp, + VoltageClampStimulusSeries, + VoltageClampStimulusSeriesData, + IntracellularElectrode, + SweepTable, + SweepTableSeriesIndex, +) + +from .core_nwb_ecephys import ( + ElectricalSeries, + ElectricalSeriesElectrodes, + SpikeEventSeries, + FeatureExtraction, + FeatureExtractionElectrodes, + EventDetection, + EventWaveform, + FilteredEphys, + LFP, + ElectrodeGroup, + ElectrodeGroupPosition, + ClusterWaveforms, + Clustering, +) + +from .core_nwb_behavior import ( + SpatialSeries, + SpatialSeriesData, + BehavioralEpochs, + BehavioralEvents, + BehavioralTimeSeries, + PupilTracking, + EyeTracking, + CompassDirection, + Position, +) + +from .core_nwb_misc import ( + AbstractFeatureSeries, + AbstractFeatureSeriesData, + AnnotationSeries, + IntervalSeries, + DecompositionSeries, + DecompositionSeriesData, + DecompositionSeriesBands, + Units, + UnitsSpikeTimesIndex, + UnitsSpikeTimes, + UnitsObsIntervalsIndex, + UnitsElectrodesIndex, + UnitsElectrodes, +) + +from .core_nwb_file import ( + ScratchData, + NWBFile, + NWBFileStimulus, + NWBFileGeneral, + NWBFileGeneralSourceScript, + NWBFileGeneralExtracellularEphys, + NWBFileGeneralExtracellularEphysElectrodes, + NWBFileGeneralIntracellularEphys, + LabMetaData, + Subject, +) + +from .core_nwb_epoch import ( + TimeIntervals, + TimeIntervalsTagsIndex, + TimeIntervalsTimeseries, + TimeIntervalsTimeseriesIndex, +) + + metamodel_version = "None" version = "2.2.4" @@ -36,7 +235,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/__init__.py 
b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py index 7162820..9f5a179 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_base.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -28,6 +55,7 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_3.hdmf_common_table import Container, Data, DynamicTable + metamodel_version = "None" version = "2.2.5" @@ -46,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -84,7 +112,9 @@ class Image(NWBData): resolution: 
Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -119,7 +149,9 @@ class TimeSeries(NWBDataInterface): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -222,7 +254,7 @@ class Images(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("Images") description: Optional[str] = Field( None, description="""Description of this collection of images.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py index 7278837..f523567 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_behavior.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -28,10 +55,14 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBDataInterface, + TimeSeriesStartingTime, + TimeSeriesSync, TimeSeries, ) + from .core_nwb_misc import IntervalSeries + metamodel_version = "None" version = "2.2.5" @@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries): None, description="""Description defining what exactly 
'straight-ahead' means.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -136,7 +169,9 @@ class BehavioralEpochs(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict) + children: Optional[List[IntervalSeries] | IntervalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -176,7 +211,9 @@ class EyeTracking(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -186,7 +223,9 @@ class CompassDirection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -196,7 +235,9 @@ class Position(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_device.py index 8b7a031..2041d19 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_device.py @@ -1,26 +1,61 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np from .core_nwb_base import NWBContainer + metamodel_version = "None" version = "2.2.5" @@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py index aa89d69..3c9489f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py +++ 
b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ecephys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,13 +53,17 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTableRegion +from ...hdmf_common.v1_1_3.hdmf_common_table import DynamicTable, DynamicTableRegion + from .core_nwb_base import ( - NWBContainer, + TimeSeriesStartingTime, NWBDataInterface, + TimeSeriesSync, TimeSeries, + NWBContainer, ) + metamodel_version = "None" version = "2.2.5" @@ -51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -90,7 +121,9 @@ class ElectricalSeries(TimeSeries): None, description="""Channel-specific conversion factor. 
Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -164,7 +197,9 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -193,14 +228,16 @@ class FeatureExtraction(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("FeatureExtraction") description: NDArray[Shape["* num_features"], str] = Field( ..., description="""Description of features (eg, ''PC1'') for each of the extracted features.""", ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( - ..., - description="""Multi-dimensional array of features extracted from each event.""", + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = ( + Field( + ..., + description="""Multi-dimensional array of features extracted from each event.""", + ) ) times: NDArray[Shape["* num_events"], float] = Field( ..., @@ -242,7 +279,7 @@ class EventDetection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("EventDetection") detection_method: str = Field( ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", @@ -262,7 +299,9 @@ class EventWaveform(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict) + children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -272,7 +311,9 @@ class FilteredEphys(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -282,7 +323,9 @@ class LFP(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -293,23 +336,37 @@ class ElectrodeGroup(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") + description: Optional[str] = Field( + None, description="""Description of this electrode group.""" + ) location: Optional[str] = Field( None, description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible.""", ) - position: Optional[Any] = Field( + position: Optional[str] = Field( None, description="""stereotaxic or common framework coordinates""" ) +class ElectrodeGroupPosition(ConfiguredBaseModel): + """ + stereotaxic or common framework coordinates + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["position"] = Field("position") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") + + class ClusterWaveforms(NWBDataInterface): """ DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ClusterWaveforms") waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) @@ -329,7 +386,7 @@ class Clustering(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Clustering") description: str = Field( ..., description="""Description of clusters or clustering, (e.g. 
cluster 0 is noise, clusters curated using Klusters, etc)""", @@ -359,5 +416,6 @@ EventWaveform.model_rebuild() FilteredEphys.model_rebuild() LFP.model_rebuild() ElectrodeGroup.model_rebuild() +ElectrodeGroupPosition.model_rebuild() ClusterWaveforms.model_rebuild() Clustering.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py index b0eaa5f..85e50f6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_epoch.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,10 +54,14 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_3.hdmf_common_table import ( + VectorData, DynamicTable, VectorIndex, ) +from .core_nwb_base import TimeSeries + + metamodel_version = "None" version = "2.2.5" @@ -48,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return 
self.array[i] else: @@ -85,10 +117,12 @@ class TimeIntervals(DynamicTable): description="""User-defined tags that identify or categorize events.""", ) tags_index: Optional[str] = Field(None, description="""Index for tags.""") - timeseries: Optional[List[Any] | Any] = Field( - default_factory=list, description="""An index into a TimeSeries object.""" + timeseries: Optional[str] = Field( + None, description="""An index into a TimeSeries object.""" + ) + timeseries_index: Optional[str] = Field( + None, description="""Index for timeseries.""" ) - timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""") colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -123,6 +157,37 @@ class TimeIntervalsTagsIndex(VectorIndex): array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(None) +class TimeIntervalsTimeseries(VectorData): + """ + An index into a TimeSeries object. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["timeseries"] = Field("timeseries") + idx_start: Optional[int] = Field( + None, + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. 
The first dimension of those arrays is always time.""", + ) + count: Optional[int] = Field( + None, + description="""Number of data samples available in this time series, during this epoch.""", + ) + timeseries: Optional[str] = Field( + None, description="""the TimeSeries that this index applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class TimeIntervalsTimeseriesIndex(VectorIndex): """ Index for timeseries. @@ -141,4 +206,5 @@ class TimeIntervalsTimeseriesIndex(VectorIndex): # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model TimeIntervals.model_rebuild() TimeIntervalsTagsIndex.model_rebuild() +TimeIntervalsTimeseries.model_rebuild() TimeIntervalsTimeseriesIndex.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py index db7c900..3c6eee7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_file.py @@ -1,23 +1,49 @@ from __future__ import annotations - -import sys -from datetime import datetime +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + 
Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,21 +53,35 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_3.hdmf_common_table import ( - DynamicTable, -) +from .core_nwb_misc import Units + +from .core_nwb_device import Device + +from .core_nwb_ecephys import ElectrodeGroup + +from .core_nwb_epoch import TimeIntervals + from .core_nwb_base import ( NWBContainer, NWBData, - NWBDataInterface, - ProcessingModule, TimeSeries, + ProcessingModule, + NWBDataInterface, ) -from .core_nwb_device import Device -from .core_nwb_epoch import TimeIntervals -from .core_nwb_ogen import OptogeneticStimulusSite + from .core_nwb_ophys import ImagingPlane +from .core_nwb_icephys import SweepTable, IntracellularElectrode + +from ...hdmf_common.v1_1_3.hdmf_common_table import ( + VectorIndex, + VectorData, + DynamicTable, +) + +from .core_nwb_ogen import OptogeneticStimulusSite + + metamodel_version = "None" version = "2.2.5" @@ -60,7 +100,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -201,7 +241,9 @@ class NWBFileGeneral(ConfiguredBaseModel): keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field( None, description="""Terms to search over.""" ) - lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""") + lab: Optional[str] = Field( + None, description="""Laboratory where experiment was performed.""" + ) notes: Optional[str] = Field(None, description="""Notes about the experiment.""") pharmacology: Optional[str] = Field( None, @@ -214,7 +256,9 @@ class 
NWBFileGeneral(ConfiguredBaseModel): related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field( None, description="""Publication information. PMID, DOI, URL, etc.""" ) - session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""") + session_id: Optional[str] = Field( + None, description="""Lab-specific ID for the session.""" + ) slices: Optional[str] = Field( None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", @@ -253,9 +297,11 @@ class NWBFileGeneral(ConfiguredBaseModel): intracellular_ephys: Optional[str] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) - optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field( - default_factory=dict, - description="""Metadata describing optogenetic stimuluation.""", + optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = ( + Field( + default_factory=dict, + description="""Metadata describing optogenetic stimuluation.""", + ) ) optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field( default_factory=dict, description="""Metadata related to optophysiology.""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py index a9cdfb4..3c48641 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_icephys.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, 
BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -25,15 +53,20 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_1_3.hdmf_common_table import ( - DynamicTable, - VectorIndex, -) from .core_nwb_base import ( + TimeSeriesStartingTime, NWBContainer, TimeSeries, + TimeSeriesSync, ) +from ...hdmf_common.v1_1_3.hdmf_common_table import ( + VectorIndex, + VectorData, + DynamicTable, +) + + metamodel_version = "None" version = "2.2.5" @@ -52,7 +85,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -90,7 +123,9 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -139,8 +174,12 @@ class CurrentClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
data: str = Field(..., description="""Recorded voltage.""") - bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + bias_current: Optional[float] = Field( + None, description="""Bias current, in amps.""" + ) + bridge_balance: Optional[float] = Field( + None, description="""Bridge balance, in ohms.""" + ) capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) @@ -155,7 +194,9 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -203,8 +244,12 @@ class IZeroClampSeries(CurrentClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + bias_current: float = Field( + ..., description="""Bias current, in amps, fixed to 0.0.""" + ) + bridge_balance: float = Field( + ..., description="""Bridge balance, in ohms, fixed to 0.0.""" + ) capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) @@ -220,7 +265,9 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -266,7 +313,9 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -315,8 +364,12 @@ class VoltageClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
data: str = Field(..., description="""Recorded current.""") - capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""") - capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""") + capacitance_fast: Optional[str] = Field( + None, description="""Fast capacitance, in farads.""" + ) + capacitance_slow: Optional[str] = Field( + None, description="""Slow capacitance, in farads.""" + ) resistance_comp_bandwidth: Optional[str] = Field( None, description="""Resistance compensation bandwidth, in hertz.""" ) @@ -343,7 +396,9 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -474,7 +529,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp") + name: Literal["whole_cell_series_resistance_comp"] = Field( + "whole_cell_series_resistance_comp" + ) unit: Optional[str] = Field( None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", @@ -501,7 +558,9 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -553,7 +612,9 @@ class IntracellularElectrode(NWBContainer): ..., description="""Description of electrode (e.g., whole-cell, sharp, etc.).""", ) - filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""") + filtering: Optional[str] = Field( + None, description="""Electrode specific filtering.""" + ) initial_access_resistance: Optional[str] = Field( None, description="""Initial access resistance.""" ) @@ -561,8 +622,12 @@ class IntracellularElectrode(NWBContainer): None, description="""Location of the electrode. 
Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", ) - resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""") - seal: Optional[str] = Field(None, description="""Information about seal used for recording.""") + resistance: Optional[str] = Field( + None, description="""Electrode resistance, in ohms.""" + ) + seal: Optional[str] = Field( + None, description="""Information about seal used for recording.""" + ) slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py index 451ca6e..905c01b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_image.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -25,7 +53,8 @@ if TYPE_CHECKING: import numpy as np -from .core_nwb_base import Image, TimeSeries +from .core_nwb_base import 
TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries + metamodel_version = "None" version = "2.2.5" @@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -74,7 +103,9 @@ class GrayscaleImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -94,7 +125,9 @@ class RGBImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -114,7 +147,9 @@ class RGBAImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -148,7 +183,9 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -213,7 +250,9 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -262,7 +301,9 @@ class OpticalSeries(ImageSeries): data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, 3 r, g, b"], float], - ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") + ] = Field( + ..., description="""Images presented to subject, either grayscale or RGB""" + ) orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). 
Must also specify frame of reference.""", @@ -278,7 +319,9 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -315,7 +358,9 @@ class IndexSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the frame in the referenced ImageSeries.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_language.py index 4963581..f3b68d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_language.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_language.py @@ -1,20 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel): tree_root: bool = False -class Arraylike(ConfiguredBaseModel): - """ - Container for arraylike information held in the dims, shape, and dtype properties.this 
is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. - """ - - linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - - # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Arraylike.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py index 70b8e14..b10ffc6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_misc.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,12 +54,16 @@ if TYPE_CHECKING: from ...hdmf_common.v1_1_3.hdmf_common_table import ( + VectorData, DynamicTable, DynamicTableRegion, - VectorData, VectorIndex, ) -from .core_nwb_base import TimeSeries + +from .core_nwb_base import 
TimeSeriesStartingTime, TimeSeriesSync, TimeSeries + +from .core_nwb_ecephys import ElectrodeGroup + metamodel_version = "None" version = "2.2.5" @@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries): ..., description="""Description of the features represented in TimeSeries::data.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries): data: NDArray[Shape["* num_times"], str] = Field( ..., description="""Annotations made during an experiment.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -214,12 +251,16 @@ class DecompositionSeries(TimeSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Data decomposed into frequency bands.""") - metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""") + metric: str = Field( + ..., description="""The metric used, e.g. phase, amplitude, power.""" + ) bands: str = Field( ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -257,7 +298,9 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None) + array: Optional[ + NDArray[Shape["* num_times, * num_channels, * num_bands"], float] + ] = Field(None) class DecompositionSeriesBands(DynamicTable): @@ -306,18 +349,22 @@ class Units(DynamicTable): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Units") spike_times_index: Optional[str] = Field( None, description="""Index into the spike_times dataset.""" ) - spike_times: Optional[str] = Field(None, description="""Spike times for each unit.""") + spike_times: Optional[str] = Field( + None, description="""Spike times for each unit.""" + ) obs_intervals_index: Optional[str] = Field( None, description="""Index into the obs_intervals dataset.""" ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field( - None, description="""Observation intervals for each unit.""" + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = ( + Field(None, description="""Observation intervals for each unit.""") + ) + electrodes_index: Optional[str] = Field( + None, description="""Index into electrodes.""" ) - electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""") electrodes: Optional[str] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", @@ -337,7 +384,9 @@ class Units(DynamicTable): 
NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] - ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") + ] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py index bda06d3..929719c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ogen.py @@ -1,25 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -27,8 +56,11 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBContainer, TimeSeries, + TimeSeriesStartingTime, + TimeSeriesSync, ) + metamodel_version = "None" version = "2.2.5" @@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel): 
object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries): data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field( + ..., description="""Excitation wavelength, in nm.""" + ) location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py index b0266dc..c207f5e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_ophys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,17 +53,23 @@ if TYPE_CHECKING: import numpy as np +from .core_nwb_image import ImageSeriesExternalFile, ImageSeries + from ...hdmf_common.v1_1_3.hdmf_common_table import ( - DynamicTable, DynamicTableRegion, + VectorData, VectorIndex, + DynamicTable, ) + from .core_nwb_base import ( + TimeSeriesStartingTime, NWBContainer, - NWBDataInterface, TimeSeries, + NWBDataInterface, + TimeSeriesSync, ) -from .core_nwb_image import ImageSeries + metamodel_version = "None" version = "2.2.5" @@ -56,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> 
"np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -113,7 +146,9 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -155,7 +190,9 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -212,7 +249,9 @@ class DfOverF(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
@@ -222,7 +261,9 @@ class Fluorescence(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -232,7 +273,9 @@ class ImageSegmentation(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field(default_factory=dict) + children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field( + default_factory=dict + ) name: str = Field(...) @@ -252,14 +295,18 @@ class PlaneSegmentation(DynamicTable): None, description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""", ) - pixel_mask_index: Optional[str] = Field(None, description="""Index into pixel_mask.""") - pixel_mask: Optional[List[Any] | Any] = Field( - default_factory=list, + pixel_mask_index: Optional[str] = Field( + None, description="""Index into pixel_mask.""" + ) + pixel_mask: Optional[str] = Field( + None, description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) - voxel_mask_index: Optional[str] = Field(None, description="""Index into voxel_mask.""") - voxel_mask: Optional[List[Any] | Any] = Field( - default_factory=list, + voxel_mask_index: Optional[str] = Field( + None, description="""Index into voxel_mask.""" + ) + voxel_mask: Optional[str] = Field( + None, description="""Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) reference_images: Optional[List[ImageSeries] | ImageSeries] = Field( @@ -300,6 +347,29 @@ class PlaneSegmentationPixelMaskIndex(VectorIndex): array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(None) +class PlaneSegmentationPixelMask(VectorData): + """ + Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["pixel_mask"] = Field("pixel_mask") + x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") + y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the pixel.""") + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class PlaneSegmentationVoxelMaskIndex(VectorIndex): """ Index into voxel_mask. @@ -314,13 +384,39 @@ class PlaneSegmentationVoxelMaskIndex(VectorIndex): array: Optional[NDArray[Shape["* num_rows"], Any]] = Field(None) +class PlaneSegmentationVoxelMask(VectorData): + """ + Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["voxel_mask"] = Field("voxel_mask") + x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") + y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") + z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the voxel.""") + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class ImagingPlane(NWBContainer): """ An imaging plane and its metadata. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[OpticalChannel] | OpticalChannel] = Field(default_factory=dict) + children: Optional[List[OpticalChannel] | OpticalChannel] = Field( + default_factory=dict + ) name: str = Field(...) @@ -331,8 +427,12 @@ class OpticalChannel(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") + description: str = Field( + ..., description="""Description or other notes about the channel.""" + ) + emission_lambda: float = Field( + ..., description="""Emission wavelength for channel, in nm.""" + ) class MotionCorrection(NWBDataInterface): @@ -374,7 +474,9 @@ Fluorescence.model_rebuild() ImageSegmentation.model_rebuild() PlaneSegmentation.model_rebuild() PlaneSegmentationPixelMaskIndex.model_rebuild() +PlaneSegmentationPixelMask.model_rebuild() PlaneSegmentationVoxelMaskIndex.model_rebuild() +PlaneSegmentationVoxelMask.model_rebuild() ImagingPlane.model_rebuild() OpticalChannel.model_rebuild() MotionCorrection.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py index 56dbf57..c0adf23 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/core_nwb_retinotopy.py @@ -1,20 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if 
sys.version_info >= (3, 8): from typing import Literal @@ -26,6 +55,7 @@ if TYPE_CHECKING: from .core_nwb_base import NWBDataInterface + metamodel_version = "None" version = "2.2.5" @@ -44,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -69,7 +99,7 @@ class ImagingRetinotopy(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ImagingRetinotopy") axis_1_phase_map: str = Field( ..., description="""Phase response to stimulus on the first measured axis.""" ) @@ -113,7 +143,9 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -131,7 +163,9 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -149,7 +183,9 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -167,7 +203,9 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -189,8 +227,12 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) + focal_depth: Optional[float] = Field( + None, description="""Focal depth offset, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) @@ -208,7 +250,9 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) @@ -227,7 +271,9 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py index f28d7ef..4394bc6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_2_5/namespace.py @@ -1,23 +1,222 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if 
sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np +from ...hdmf_common.v1_1_3.hdmf_common_sparse import ( + CSRMatrix, + CSRMatrixIndices, + CSRMatrixIndptr, + CSRMatrixData, +) + +from ...hdmf_common.v1_1_3.hdmf_common_table import ( + Data, + Index, + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + Container, + DynamicTable, +) + +from .core_nwb_retinotopy import ( + ImagingRetinotopy, + ImagingRetinotopyAxis1PhaseMap, + ImagingRetinotopyAxis1PowerMap, + ImagingRetinotopyAxis2PhaseMap, + ImagingRetinotopyAxis2PowerMap, + ImagingRetinotopyFocalDepthImage, + ImagingRetinotopySignMap, + ImagingRetinotopyVasculatureImage, +) + +from .core_nwb_base import ( + NWBData, + Image, + NWBContainer, + NWBDataInterface, + TimeSeries, + TimeSeriesData, + TimeSeriesStartingTime, + TimeSeriesSync, + ProcessingModule, + Images, +) + +from .core_nwb_ophys import ( + TwoPhotonSeries, + RoiResponseSeries, + RoiResponseSeriesRois, + DfOverF, + Fluorescence, + ImageSegmentation, + PlaneSegmentation, + PlaneSegmentationPixelMaskIndex, + PlaneSegmentationPixelMask, + PlaneSegmentationVoxelMaskIndex, + PlaneSegmentationVoxelMask, + ImagingPlane, + OpticalChannel, + MotionCorrection, + CorrectedImageStack, +) + +from .core_nwb_device import Device + +from .core_nwb_image import ( + GrayscaleImage, + RGBImage, + RGBAImage, + ImageSeries, + ImageSeriesExternalFile, + ImageMaskSeries, + OpticalSeries, + IndexSeries, +) + +from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite + +from .core_nwb_icephys import ( + PatchClampSeries, + PatchClampSeriesData, + CurrentClampSeries, + CurrentClampSeriesData, + IZeroClampSeries, + CurrentClampStimulusSeries, + CurrentClampStimulusSeriesData, + VoltageClampSeries, + VoltageClampSeriesData, + VoltageClampSeriesCapacitanceFast, + VoltageClampSeriesCapacitanceSlow, + VoltageClampSeriesResistanceCompBandwidth, + 
VoltageClampSeriesResistanceCompCorrection, + VoltageClampSeriesResistanceCompPrediction, + VoltageClampSeriesWholeCellCapacitanceComp, + VoltageClampSeriesWholeCellSeriesResistanceComp, + VoltageClampStimulusSeries, + VoltageClampStimulusSeriesData, + IntracellularElectrode, + SweepTable, + SweepTableSeriesIndex, +) + +from .core_nwb_ecephys import ( + ElectricalSeries, + ElectricalSeriesElectrodes, + SpikeEventSeries, + FeatureExtraction, + FeatureExtractionElectrodes, + EventDetection, + EventWaveform, + FilteredEphys, + LFP, + ElectrodeGroup, + ElectrodeGroupPosition, + ClusterWaveforms, + Clustering, +) + +from .core_nwb_behavior import ( + SpatialSeries, + SpatialSeriesData, + BehavioralEpochs, + BehavioralEvents, + BehavioralTimeSeries, + PupilTracking, + EyeTracking, + CompassDirection, + Position, +) + +from .core_nwb_misc import ( + AbstractFeatureSeries, + AbstractFeatureSeriesData, + AnnotationSeries, + IntervalSeries, + DecompositionSeries, + DecompositionSeriesData, + DecompositionSeriesBands, + Units, + UnitsSpikeTimesIndex, + UnitsSpikeTimes, + UnitsObsIntervalsIndex, + UnitsElectrodesIndex, + UnitsElectrodes, +) + +from .core_nwb_file import ( + ScratchData, + NWBFile, + NWBFileStimulus, + NWBFileGeneral, + NWBFileGeneralSourceScript, + NWBFileGeneralExtracellularEphys, + NWBFileGeneralExtracellularEphysElectrodes, + NWBFileGeneralIntracellularEphys, + LabMetaData, + Subject, +) + +from .core_nwb_epoch import ( + TimeIntervals, + TimeIntervalsTagsIndex, + TimeIntervalsTimeseries, + TimeIntervalsTimeseriesIndex, +) + + metamodel_version = "None" version = "2.2.5" @@ -36,7 +235,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/__init__.py 
b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py index af83624..165a4d6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_base.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,8 +54,10 @@ if TYPE_CHECKING: from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data + from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable + metamodel_version = "None" version = "2.3.0" @@ -47,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: 
@@ -85,7 +114,9 @@ class Image(NWBData): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -120,7 +151,9 @@ class TimeSeries(NWBDataInterface): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -227,7 +260,7 @@ class Images(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("Images") description: Optional[str] = Field( None, description="""Description of this collection of images.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py index a5d530b..7b14b83 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_behavior.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -28,10 +55,14 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBDataInterface, + TimeSeriesStartingTime, + TimeSeriesSync, TimeSeries, ) + from .core_nwb_misc import IntervalSeries + metamodel_version = "None" version = "2.3.0" @@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries): None, description="""Description defining what exactly 
'straight-ahead' means.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -136,7 +169,9 @@ class BehavioralEpochs(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict) + children: Optional[List[IntervalSeries] | IntervalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -176,7 +211,9 @@ class EyeTracking(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -186,7 +223,9 @@ class CompassDirection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -196,7 +235,9 @@ class Position(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_device.py index e835622..310c48d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_device.py @@ -1,26 +1,61 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np from .core_nwb_base import NWBContainer + metamodel_version = "None" version = "2.3.0" @@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py index e7fbeae..80c9fa1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py +++ 
b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ecephys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,13 +53,17 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTableRegion +from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, DynamicTableRegion + from .core_nwb_base import ( - NWBContainer, + TimeSeriesStartingTime, NWBDataInterface, + TimeSeriesSync, TimeSeries, + NWBContainer, ) + metamodel_version = "None" version = "2.3.0" @@ -51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -94,7 +125,9 @@ class ElectricalSeries(TimeSeries): None, description="""Channel-specific conversion factor. 
Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -172,7 +205,9 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -201,14 +236,16 @@ class FeatureExtraction(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("FeatureExtraction") description: NDArray[Shape["* num_features"], str] = Field( ..., description="""Description of features (eg, ''PC1'') for each of the extracted features.""", ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( - ..., - description="""Multi-dimensional array of features extracted from each event.""", + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = ( + Field( + ..., + description="""Multi-dimensional array of features extracted from each event.""", + ) ) times: NDArray[Shape["* num_events"], float] = Field( ..., @@ -250,7 +287,7 @@ class EventDetection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("EventDetection") detection_method: str = Field( ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", @@ -270,7 +307,9 @@ class EventWaveform(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict) + children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -280,7 +319,9 @@ class FilteredEphys(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -290,7 +331,9 @@ class LFP(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -301,23 +344,37 @@ class ElectrodeGroup(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") + description: Optional[str] = Field( + None, description="""Description of this electrode group.""" + ) location: Optional[str] = Field( None, description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible.""", ) - position: Optional[Any] = Field( + position: Optional[str] = Field( None, description="""stereotaxic or common framework coordinates""" ) +class ElectrodeGroupPosition(ConfiguredBaseModel): + """ + stereotaxic or common framework coordinates + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["position"] = Field("position") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") + + class ClusterWaveforms(NWBDataInterface): """ DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ClusterWaveforms") waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) @@ -337,7 +394,7 @@ class Clustering(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Clustering") description: str = Field( ..., description="""Description of clusters or clustering, (e.g. 
cluster 0 is noise, clusters curated using Klusters, etc)""", @@ -367,5 +424,6 @@ EventWaveform.model_rebuild() FilteredEphys.model_rebuild() LFP.model_rebuild() ElectrodeGroup.model_rebuild() +ElectrodeGroupPosition.model_rebuild() ClusterWaveforms.model_rebuild() Clustering.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py index ddcd0d7..17002a4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_epoch.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,10 +54,14 @@ if TYPE_CHECKING: from ...hdmf_common.v1_5_0.hdmf_common_table import ( + VectorData, DynamicTable, VectorIndex, ) +from .core_nwb_base import TimeSeries + + metamodel_version = "None" version = "2.3.0" @@ -49,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return 
self.array[i] else: @@ -86,10 +117,12 @@ class TimeIntervals(DynamicTable): description="""User-defined tags that identify or categorize events.""", ) tags_index: Optional[str] = Field(None, description="""Index for tags.""") - timeseries: Optional[List[Any] | Any] = Field( - default_factory=list, description="""An index into a TimeSeries object.""" + timeseries: Optional[str] = Field( + None, description="""An index into a TimeSeries object.""" + ) + timeseries_index: Optional[str] = Field( + None, description="""Index for timeseries.""" ) - timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""") colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -131,6 +164,37 @@ class TimeIntervalsTagsIndex(VectorIndex): ] = Field(None) +class TimeIntervalsTimeseries(VectorData): + """ + An index into a TimeSeries object. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["timeseries"] = Field("timeseries") + idx_start: Optional[int] = Field( + None, + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", + ) + count: Optional[int] = Field( + None, + description="""Number of data samples available in this time series, during this epoch.""", + ) + timeseries: Optional[str] = Field( + None, description="""the TimeSeries that this index applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class TimeIntervalsTimeseriesIndex(VectorIndex): """ Index for timeseries. 
@@ -159,4 +223,5 @@ class TimeIntervalsTimeseriesIndex(VectorIndex): # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model TimeIntervals.model_rebuild() TimeIntervalsTagsIndex.model_rebuild() +TimeIntervalsTimeseries.model_rebuild() TimeIntervalsTimeseriesIndex.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py index 23f6dcc..9ca43e8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_file.py @@ -1,23 +1,49 @@ from __future__ import annotations - -import sys -from datetime import datetime +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,19 +53,31 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable +from .core_nwb_misc import Units + +from .core_nwb_device import Device + +from .core_nwb_ecephys import ElectrodeGroup + +from .core_nwb_epoch import TimeIntervals + from .core_nwb_base import ( NWBContainer, NWBData, - NWBDataInterface, - ProcessingModule, TimeSeries, + ProcessingModule, + NWBDataInterface, ) -from .core_nwb_device import Device 
-from .core_nwb_epoch import TimeIntervals -from .core_nwb_ogen import OptogeneticStimulusSite + from .core_nwb_ophys import ImagingPlane +from .core_nwb_icephys import SweepTable, IntracellularElectrode + +from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable + +from .core_nwb_ogen import OptogeneticStimulusSite + + metamodel_version = "None" version = "2.3.0" @@ -58,7 +96,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -199,7 +237,9 @@ class NWBFileGeneral(ConfiguredBaseModel): keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field( None, description="""Terms to search over.""" ) - lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""") + lab: Optional[str] = Field( + None, description="""Laboratory where experiment was performed.""" + ) notes: Optional[str] = Field(None, description="""Notes about the experiment.""") pharmacology: Optional[str] = Field( None, @@ -212,7 +252,9 @@ class NWBFileGeneral(ConfiguredBaseModel): related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field( None, description="""Publication information. 
PMID, DOI, URL, etc.""" ) - session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""") + session_id: Optional[str] = Field( + None, description="""Lab-specific ID for the session.""" + ) slices: Optional[str] = Field( None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", @@ -251,9 +293,11 @@ class NWBFileGeneral(ConfiguredBaseModel): intracellular_ephys: Optional[str] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) - optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field( - default_factory=dict, - description="""Metadata describing optogenetic stimuluation.""", + optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = ( + Field( + default_factory=dict, + description="""Metadata describing optogenetic stimuluation.""", + ) ) optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field( default_factory=dict, description="""Metadata related to optophysiology.""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py index bf12939..2e33891 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_icephys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + 
UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,15 +53,20 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_5_0.hdmf_common_table import ( - DynamicTable, - VectorIndex, -) from .core_nwb_base import ( + TimeSeriesStartingTime, NWBContainer, TimeSeries, + TimeSeriesSync, ) +from ...hdmf_common.v1_5_0.hdmf_common_table import ( + VectorIndex, + VectorData, + DynamicTable, +) + + metamodel_version = "None" version = "2.3.0" @@ -53,7 +85,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -91,7 +123,9 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -140,8 +174,12 @@ class CurrentClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
data: str = Field(..., description="""Recorded voltage.""") - bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + bias_current: Optional[float] = Field( + None, description="""Bias current, in amps.""" + ) + bridge_balance: Optional[float] = Field( + None, description="""Bridge balance, in ohms.""" + ) capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) @@ -156,7 +194,9 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -208,8 +248,12 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""", ) - bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + bias_current: float = Field( + ..., description="""Bias current, in amps, fixed to 0.0.""" + ) + bridge_balance: float = Field( + ..., description="""Bridge balance, in ohms, fixed to 0.0.""" + ) capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) @@ -222,7 +266,9 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -268,7 +314,9 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -317,8 +365,12 @@ class VoltageClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Recorded current.""") - capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""") - capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""") + capacitance_fast: Optional[str] = Field( + None, description="""Fast capacitance, in farads.""" + ) + capacitance_slow: Optional[str] = Field( + None, description="""Slow capacitance, in farads.""" + ) resistance_comp_bandwidth: Optional[str] = Field( None, description="""Resistance compensation bandwidth, in hertz.""" ) @@ -345,7 +397,9 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -476,7 +530,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp") + name: Literal["whole_cell_series_resistance_comp"] = Field( + "whole_cell_series_resistance_comp" + ) unit: Optional[str] = Field( None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", @@ -503,7 +559,9 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -555,7 +613,9 @@ class IntracellularElectrode(NWBContainer): ..., description="""Description of electrode (e.g., whole-cell, sharp, etc.).""", ) - filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""") + filtering: Optional[str] = Field( + None, description="""Electrode specific filtering.""" + ) initial_access_resistance: Optional[str] = Field( None, description="""Initial access resistance.""" ) @@ -563,8 +623,12 @@ class IntracellularElectrode(NWBContainer): None, description="""Location of the electrode. 
Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", ) - resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""") - seal: Optional[str] = Field(None, description="""Information about seal used for recording.""") + resistance: Optional[str] = Field( + None, description="""Electrode resistance, in ohms.""" + ) + seal: Optional[str] = Field( + None, description="""Information about seal used for recording.""" + ) slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py index 2ab2477..0387152 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_image.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -25,7 +53,8 @@ if TYPE_CHECKING: import numpy as np -from .core_nwb_base import Image, TimeSeries +from .core_nwb_base import 
TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries + metamodel_version = "None" version = "2.3.0" @@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -74,7 +103,9 @@ class GrayscaleImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -94,7 +125,9 @@ class RGBImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -114,7 +147,9 @@ class RGBAImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -148,7 +183,9 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -213,7 +250,9 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -262,7 +301,9 @@ class OpticalSeries(ImageSeries): data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, 3 r, g, b"], float], - ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") + ] = Field( + ..., description="""Images presented to subject, either grayscale or RGB""" + ) orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). 
Must also specify frame of reference.""", @@ -278,7 +319,9 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -315,7 +358,9 @@ class IndexSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the frame in the referenced ImageSeries.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_language.py index 4963581..f3b68d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_language.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_language.py @@ -1,20 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel): tree_root: bool = False -class Arraylike(ConfiguredBaseModel): - """ - Container for arraylike information held in the dims, shape, and dtype properties.this 
is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. - """ - - linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - - # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Arraylike.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py index 060312b..f1de761 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_misc.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,12 +54,16 @@ if TYPE_CHECKING: from ...hdmf_common.v1_5_0.hdmf_common_table import ( - DynamicTable, DynamicTableRegion, - VectorData, VectorIndex, + VectorData, + DynamicTable, ) -from .core_nwb_base import TimeSeries + +from .core_nwb_ecephys import 
ElectrodeGroup + +from .core_nwb_base import TimeSeriesStartingTime, TimeSeries, TimeSeriesSync + metamodel_version = "None" version = "2.3.0" @@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries): ..., description="""Description of the features represented in TimeSeries::data.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries): data: NDArray[Shape["* num_times"], str] = Field( ..., description="""Annotations made during an experiment.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -214,7 +251,9 @@ class DecompositionSeries(TimeSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Data decomposed into frequency bands.""") - metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""") + metric: str = Field( + ..., description="""The metric used, e.g. phase, amplitude, power.""" + ) source_channels: Optional[str] = Field( None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""", @@ -223,7 +262,9 @@ class DecompositionSeries(TimeSeries): ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -261,7 +302,9 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None) + array: Optional[ + NDArray[Shape["* num_times, * num_channels, * num_bands"], float] + ] = Field(None) class DecompositionSeriesSourceChannels(DynamicTableRegion): @@ -331,18 +374,22 @@ class Units(DynamicTable): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Units") spike_times_index: Optional[str] = Field( None, description="""Index into the spike_times dataset.""" ) - spike_times: Optional[str] = Field(None, description="""Spike times for each unit.""") + spike_times: Optional[str] = Field( + None, description="""Spike times for each unit.""" + ) obs_intervals_index: Optional[str] = Field( None, description="""Index into the obs_intervals dataset.""" ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field( - None, description="""Observation intervals for each unit.""" + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = ( + Field(None, description="""Observation intervals for each unit.""") + ) + electrodes_index: Optional[str] = Field( + None, description="""Index into electrodes.""" ) - electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""") electrodes: Optional[str] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", @@ -362,10 +409,14 @@ class 
Units(DynamicTable): NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] - ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field( - None, - description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. 
The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", + ] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) + waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = ( + Field( + None, + description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. 
The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", + ) ) waveforms_index: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py index 2123ebe..4b04266 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ogen.py @@ -1,25 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -27,8 +56,11 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBContainer, TimeSeries, + TimeSeriesStartingTime, + TimeSeriesSync, ) + metamodel_version = "None" version = "2.3.0" @@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel): object_id: 
Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries): data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field( + ..., description="""Excitation wavelength, in nm.""" + ) location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py index 5bb1c84..cb1fefc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_ophys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,17 +53,23 @@ if TYPE_CHECKING: import numpy as np +from .core_nwb_image import ImageSeriesExternalFile, ImageSeries + from ...hdmf_common.v1_5_0.hdmf_common_table import ( - DynamicTable, DynamicTableRegion, + VectorData, VectorIndex, + DynamicTable, ) + from .core_nwb_base import ( + TimeSeriesStartingTime, NWBContainer, - NWBDataInterface, TimeSeries, + NWBDataInterface, + TimeSeriesSync, ) -from .core_nwb_image import ImageSeries + metamodel_version = "None" version = "2.3.0" @@ -56,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> 
"np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -113,7 +146,9 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -155,7 +190,9 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -212,7 +249,9 @@ class DfOverF(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
@@ -222,7 +261,9 @@ class Fluorescence(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -232,7 +273,9 @@ class ImageSegmentation(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field(default_factory=dict) + children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field( + default_factory=dict + ) name: str = Field(...) @@ -252,14 +295,18 @@ class PlaneSegmentation(DynamicTable): None, description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""", ) - pixel_mask_index: Optional[str] = Field(None, description="""Index into pixel_mask.""") - pixel_mask: Optional[List[Any] | Any] = Field( - default_factory=list, + pixel_mask_index: Optional[str] = Field( + None, description="""Index into pixel_mask.""" + ) + pixel_mask: Optional[str] = Field( + None, description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) - voxel_mask_index: Optional[str] = Field(None, description="""Index into voxel_mask.""") - voxel_mask: Optional[List[Any] | Any] = Field( - default_factory=list, + voxel_mask_index: Optional[str] = Field( + None, description="""Index into voxel_mask.""" + ) + voxel_mask: Optional[str] = Field( + None, description="""Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) reference_images: Optional[List[ImageSeries] | ImageSeries] = Field( @@ -307,6 +354,29 @@ class PlaneSegmentationPixelMaskIndex(VectorIndex): ] = Field(None) +class PlaneSegmentationPixelMask(VectorData): + """ + Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["pixel_mask"] = Field("pixel_mask") + x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") + y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the pixel.""") + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class PlaneSegmentationVoxelMaskIndex(VectorIndex): """ Index into voxel_mask. @@ -331,13 +401,39 @@ class PlaneSegmentationVoxelMaskIndex(VectorIndex): ] = Field(None) +class PlaneSegmentationVoxelMask(VectorData): + """ + Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["voxel_mask"] = Field("voxel_mask") + x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") + y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") + z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the voxel.""") + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class ImagingPlane(NWBContainer): """ An imaging plane and its metadata. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[OpticalChannel] | OpticalChannel] = Field(default_factory=dict) + children: Optional[List[OpticalChannel] | OpticalChannel] = Field( + default_factory=dict + ) name: str = Field(...) @@ -348,8 +444,12 @@ class OpticalChannel(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") + description: str = Field( + ..., description="""Description or other notes about the channel.""" + ) + emission_lambda: float = Field( + ..., description="""Emission wavelength for channel, in nm.""" + ) class MotionCorrection(NWBDataInterface): @@ -391,7 +491,9 @@ Fluorescence.model_rebuild() ImageSegmentation.model_rebuild() PlaneSegmentation.model_rebuild() PlaneSegmentationPixelMaskIndex.model_rebuild() +PlaneSegmentationPixelMask.model_rebuild() PlaneSegmentationVoxelMaskIndex.model_rebuild() +PlaneSegmentationVoxelMask.model_rebuild() ImagingPlane.model_rebuild() OpticalChannel.model_rebuild() MotionCorrection.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py index 351e630..ce18a7c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/core_nwb_retinotopy.py @@ -1,20 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if 
sys.version_info >= (3, 8): from typing import Literal @@ -26,6 +55,7 @@ if TYPE_CHECKING: from .core_nwb_base import NWBDataInterface + metamodel_version = "None" version = "2.3.0" @@ -44,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -69,7 +99,7 @@ class ImagingRetinotopy(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ImagingRetinotopy") axis_1_phase_map: str = Field( ..., description="""Phase response to stimulus on the first measured axis.""" ) @@ -113,7 +143,9 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -131,7 +163,9 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -149,7 +183,9 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -167,7 +203,9 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -189,8 +227,12 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) + focal_depth: Optional[float] = Field( + None, description="""Focal depth offset, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) @@ -208,7 +250,9 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) @@ -227,7 +271,9 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py index de5e389..8985d6a 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_3_0/namespace.py @@ -1,23 +1,231 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if 
sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np +from ...hdmf_experimental.v0_1_0.hdmf_experimental_resources import ( + ExternalResources, + ExternalResourcesKeys, + ExternalResourcesEntities, + ExternalResourcesResources, + ExternalResourcesObjects, + ExternalResourcesObjectKeys, +) + +from ...hdmf_common.v1_5_0.hdmf_common_sparse import CSRMatrix + +from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container, SimpleMultiContainer + +from ...hdmf_common.v1_5_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + +from ...hdmf_experimental.v0_1_0.hdmf_experimental_experimental import EnumData + +from .core_nwb_retinotopy import ( + ImagingRetinotopy, + ImagingRetinotopyAxis1PhaseMap, + ImagingRetinotopyAxis1PowerMap, + ImagingRetinotopyAxis2PhaseMap, + ImagingRetinotopyAxis2PowerMap, + ImagingRetinotopyFocalDepthImage, + ImagingRetinotopySignMap, + ImagingRetinotopyVasculatureImage, +) + +from .core_nwb_base import ( + NWBData, + Image, + NWBContainer, + NWBDataInterface, + TimeSeries, + TimeSeriesData, + TimeSeriesStartingTime, + TimeSeriesSync, + ProcessingModule, + Images, +) + +from .core_nwb_ophys import ( + TwoPhotonSeries, + RoiResponseSeries, + RoiResponseSeriesRois, + DfOverF, + Fluorescence, + ImageSegmentation, + PlaneSegmentation, + PlaneSegmentationPixelMaskIndex, + PlaneSegmentationPixelMask, + PlaneSegmentationVoxelMaskIndex, + PlaneSegmentationVoxelMask, + ImagingPlane, + OpticalChannel, + MotionCorrection, + CorrectedImageStack, +) + +from .core_nwb_device import Device + +from .core_nwb_image import ( + GrayscaleImage, + RGBImage, + RGBAImage, + ImageSeries, + ImageSeriesExternalFile, + ImageMaskSeries, + OpticalSeries, + IndexSeries, +) + +from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite + +from .core_nwb_icephys import ( + 
PatchClampSeries, + PatchClampSeriesData, + CurrentClampSeries, + CurrentClampSeriesData, + IZeroClampSeries, + CurrentClampStimulusSeries, + CurrentClampStimulusSeriesData, + VoltageClampSeries, + VoltageClampSeriesData, + VoltageClampSeriesCapacitanceFast, + VoltageClampSeriesCapacitanceSlow, + VoltageClampSeriesResistanceCompBandwidth, + VoltageClampSeriesResistanceCompCorrection, + VoltageClampSeriesResistanceCompPrediction, + VoltageClampSeriesWholeCellCapacitanceComp, + VoltageClampSeriesWholeCellSeriesResistanceComp, + VoltageClampStimulusSeries, + VoltageClampStimulusSeriesData, + IntracellularElectrode, + SweepTable, + SweepTableSeriesIndex, +) + +from .core_nwb_ecephys import ( + ElectricalSeries, + ElectricalSeriesElectrodes, + SpikeEventSeries, + FeatureExtraction, + FeatureExtractionElectrodes, + EventDetection, + EventWaveform, + FilteredEphys, + LFP, + ElectrodeGroup, + ElectrodeGroupPosition, + ClusterWaveforms, + Clustering, +) + +from .core_nwb_behavior import ( + SpatialSeries, + SpatialSeriesData, + BehavioralEpochs, + BehavioralEvents, + BehavioralTimeSeries, + PupilTracking, + EyeTracking, + CompassDirection, + Position, +) + +from .core_nwb_misc import ( + AbstractFeatureSeries, + AbstractFeatureSeriesData, + AnnotationSeries, + IntervalSeries, + DecompositionSeries, + DecompositionSeriesData, + DecompositionSeriesSourceChannels, + DecompositionSeriesBands, + Units, + UnitsSpikeTimesIndex, + UnitsSpikeTimes, + UnitsObsIntervalsIndex, + UnitsElectrodesIndex, + UnitsElectrodes, + UnitsWaveformsIndex, + UnitsWaveformsIndexIndex, +) + +from .core_nwb_file import ( + ScratchData, + NWBFile, + NWBFileStimulus, + NWBFileGeneral, + NWBFileGeneralSourceScript, + NWBFileGeneralExtracellularEphys, + NWBFileGeneralExtracellularEphysElectrodes, + NWBFileGeneralIntracellularEphys, + LabMetaData, + Subject, +) + +from .core_nwb_epoch import ( + TimeIntervals, + TimeIntervalsTagsIndex, + TimeIntervalsTimeseries, + TimeIntervalsTimeseriesIndex, +) + + 
metamodel_version = "None" version = "2.3.0" @@ -36,7 +244,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py index 3c4bcde..0fe3f04 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_base.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,8 +53,10 @@ if TYPE_CHECKING: import numpy as np +from 
...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable + from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, VectorData + metamodel_version = "None" version = "2.4.0" @@ -47,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -81,7 +110,18 @@ class TimeSeriesReferenceVectorData(VectorData): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("timeseries") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -105,7 +145,9 @@ class Image(NWBData): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -140,7 +182,9 @@ class TimeSeries(NWBDataInterface): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -247,7 +291,7 @@ class Images(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Images") description: Optional[str] = Field( None, description="""Description of this collection of images.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py index 5240a0f..08a2015 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_behavior.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -28,10 
+55,14 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBDataInterface, + TimeSeriesStartingTime, + TimeSeriesSync, TimeSeries, ) + from .core_nwb_misc import IntervalSeries + metamodel_version = "None" version = "2.4.0" @@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries): None, description="""Description defining what exactly 'straight-ahead' means.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -136,7 +169,9 @@ class BehavioralEpochs(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict) + children: Optional[List[IntervalSeries] | IntervalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -176,7 +211,9 @@ class EyeTracking(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
@@ -186,7 +223,9 @@ class CompassDirection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -196,7 +235,9 @@ class Position(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_device.py index 6094cec..dad1321 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_device.py @@ -1,26 +1,61 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np 
from .core_nwb_base import NWBContainer + metamodel_version = "None" version = "2.4.0" @@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py index 77b6646..d6373a7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ecephys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,13 +53,17 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTableRegion +from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, DynamicTableRegion + from .core_nwb_base import ( - NWBContainer, + TimeSeriesStartingTime, NWBDataInterface, + TimeSeriesSync, TimeSeries, + NWBContainer, ) + metamodel_version = "None" version = "2.4.0" @@ 
-51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -94,7 +125,9 @@ class ElectricalSeries(TimeSeries): None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -172,7 +205,9 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. 
This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -201,14 +236,16 @@ class FeatureExtraction(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("FeatureExtraction") description: NDArray[Shape["* num_features"], str] = Field( ..., description="""Description of features (eg, ''PC1'') for each of the extracted features.""", ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( - ..., - description="""Multi-dimensional array of features extracted from each event.""", + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = ( + Field( + ..., + description="""Multi-dimensional array of features extracted from each event.""", + ) ) times: NDArray[Shape["* num_events"], float] = Field( ..., @@ -250,7 +287,7 @@ class EventDetection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("EventDetection") detection_method: str = Field( ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", @@ -270,7 +307,9 @@ class EventWaveform(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict) + children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -280,7 +319,9 @@ class FilteredEphys(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -290,7 +331,9 @@ class LFP(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -301,23 +344,37 @@ class ElectrodeGroup(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") + description: Optional[str] = Field( + None, description="""Description of this electrode group.""" + ) location: Optional[str] = Field( None, description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible.""", ) - position: Optional[Any] = Field( + position: Optional[str] = Field( None, description="""stereotaxic or common framework coordinates""" ) +class ElectrodeGroupPosition(ConfiguredBaseModel): + """ + stereotaxic or common framework coordinates + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["position"] = Field("position") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") + + class ClusterWaveforms(NWBDataInterface): """ DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ClusterWaveforms") waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) @@ -337,7 +394,7 @@ class Clustering(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Clustering") description: str = Field( ..., description="""Description of clusters or clustering, (e.g. 
cluster 0 is noise, clusters curated using Klusters, etc)""", @@ -367,5 +424,6 @@ EventWaveform.model_rebuild() FilteredEphys.model_rebuild() LFP.model_rebuild() ElectrodeGroup.model_rebuild() +ElectrodeGroupPosition.model_rebuild() ClusterWaveforms.model_rebuild() Clustering.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py index 9b423f6..8c1af60 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_epoch.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,10 +54,14 @@ if TYPE_CHECKING: from ...hdmf_common.v1_5_0.hdmf_common_table import ( + VectorData, DynamicTable, VectorIndex, ) +from .core_nwb_base import TimeSeries + + metamodel_version = "None" version = "2.4.0" @@ -49,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return 
self.array[i] else: @@ -86,10 +117,12 @@ class TimeIntervals(DynamicTable): description="""User-defined tags that identify or categorize events.""", ) tags_index: Optional[str] = Field(None, description="""Index for tags.""") - timeseries: Optional[List[Any] | Any] = Field( - default_factory=list, description="""An index into a TimeSeries object.""" + timeseries: Optional[str] = Field( + None, description="""An index into a TimeSeries object.""" + ) + timeseries_index: Optional[str] = Field( + None, description="""Index for timeseries.""" ) - timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""") colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -131,6 +164,37 @@ class TimeIntervalsTagsIndex(VectorIndex): ] = Field(None) +class TimeIntervalsTimeseries(VectorData): + """ + An index into a TimeSeries object. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["timeseries"] = Field("timeseries") + idx_start: Optional[int] = Field( + None, + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", + ) + count: Optional[int] = Field( + None, + description="""Number of data samples available in this time series, during this epoch.""", + ) + timeseries: Optional[str] = Field( + None, description="""the TimeSeries that this index applies to.""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class TimeIntervalsTimeseriesIndex(VectorIndex): """ Index for timeseries. 
@@ -159,4 +223,5 @@ class TimeIntervalsTimeseriesIndex(VectorIndex): # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model TimeIntervals.model_rebuild() TimeIntervalsTagsIndex.model_rebuild() +TimeIntervalsTimeseries.model_rebuild() TimeIntervalsTimeseriesIndex.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py index 43c1538..758be41 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_file.py @@ -1,23 +1,49 @@ from __future__ import annotations - -import sys -from datetime import datetime +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,19 +53,39 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable +from .core_nwb_misc import Units + +from .core_nwb_icephys import ( + RepetitionsTable, + ExperimentalConditionsTable, + SimultaneousRecordingsTable, + IntracellularRecordingsTable, + SweepTable, + SequentialRecordingsTable, + IntracellularElectrode, +) + +from .core_nwb_device import Device + +from .core_nwb_ecephys import ElectrodeGroup + +from 
.core_nwb_epoch import TimeIntervals + from .core_nwb_base import ( NWBContainer, NWBData, - NWBDataInterface, - ProcessingModule, TimeSeries, + ProcessingModule, + NWBDataInterface, ) -from .core_nwb_device import Device -from .core_nwb_epoch import TimeIntervals -from .core_nwb_ogen import OptogeneticStimulusSite + from .core_nwb_ophys import ImagingPlane +from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable + +from .core_nwb_ogen import OptogeneticStimulusSite + + metamodel_version = "None" version = "2.4.0" @@ -58,7 +104,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -199,7 +245,9 @@ class NWBFileGeneral(ConfiguredBaseModel): keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field( None, description="""Terms to search over.""" ) - lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""") + lab: Optional[str] = Field( + None, description="""Laboratory where experiment was performed.""" + ) notes: Optional[str] = Field(None, description="""Notes about the experiment.""") pharmacology: Optional[str] = Field( None, @@ -212,7 +260,9 @@ class NWBFileGeneral(ConfiguredBaseModel): related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field( None, description="""Publication information. 
PMID, DOI, URL, etc.""" ) - session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""") + session_id: Optional[str] = Field( + None, description="""Lab-specific ID for the session.""" + ) slices: Optional[str] = Field( None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", @@ -251,9 +301,11 @@ class NWBFileGeneral(ConfiguredBaseModel): intracellular_ephys: Optional[str] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) - optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field( - default_factory=dict, - description="""Metadata describing optogenetic stimuluation.""", + optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = ( + Field( + default_factory=dict, + description="""Metadata describing optogenetic stimuluation.""", + ) ) optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field( default_factory=dict, description="""Metadata related to optophysiology.""" diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py index 9e97309..3f9d392 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_icephys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + 
UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,17 +54,22 @@ if TYPE_CHECKING: from ...hdmf_common.v1_5_0.hdmf_common_table import ( - AlignedDynamicTable, - DynamicTable, DynamicTableRegion, + AlignedDynamicTable, VectorIndex, + VectorData, + DynamicTable, ) + from .core_nwb_base import ( + TimeSeriesStartingTime, + TimeSeriesReferenceVectorData, NWBContainer, TimeSeries, - TimeSeriesReferenceVectorData, + TimeSeriesSync, ) + metamodel_version = "None" version = "2.4.0" @@ -56,7 +88,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -94,7 +126,9 @@ class PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -143,8 +177,12 @@ class CurrentClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
data: str = Field(..., description="""Recorded voltage.""") - bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + bias_current: Optional[float] = Field( + None, description="""Bias current, in amps.""" + ) + bridge_balance: Optional[float] = Field( + None, description="""Bridge balance, in ohms.""" + ) capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) @@ -159,7 +197,9 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -211,8 +251,12 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""", ) - bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + bias_current: float = Field( + ..., description="""Bias current, in amps, fixed to 0.0.""" + ) + bridge_balance: float = Field( + ..., description="""Bridge balance, in ohms, fixed to 0.0.""" + ) capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) @@ -225,7 +269,9 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -271,7 +317,9 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -320,8 +368,12 @@ class VoltageClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Recorded current.""") - capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""") - capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""") + capacitance_fast: Optional[str] = Field( + None, description="""Fast capacitance, in farads.""" + ) + capacitance_slow: Optional[str] = Field( + None, description="""Slow capacitance, in farads.""" + ) resistance_comp_bandwidth: Optional[str] = Field( None, description="""Resistance compensation bandwidth, in hertz.""" ) @@ -348,7 +400,9 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -479,7 +533,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp") + name: Literal["whole_cell_series_resistance_comp"] = Field( + "whole_cell_series_resistance_comp" + ) unit: Optional[str] = Field( None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", @@ -506,7 +562,9 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -558,7 +616,9 @@ class IntracellularElectrode(NWBContainer): ..., description="""Description of electrode (e.g., whole-cell, sharp, etc.).""", ) - filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""") + filtering: Optional[str] = Field( + None, description="""Electrode specific filtering.""" + ) initial_access_resistance: Optional[str] = Field( None, description="""Initial access resistance.""" ) @@ -566,8 +626,12 @@ class IntracellularElectrode(NWBContainer): None, description="""Location of the electrode. 
Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", ) - resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""") - seal: Optional[str] = Field(None, description="""Information about seal used for recording.""") + resistance: Optional[str] = Field( + None, description="""Electrode resistance, in ohms.""" + ) + seal: Optional[str] = Field( + None, description="""Information about seal used for recording.""" + ) slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) @@ -693,6 +757,17 @@ class IntracellularStimuliTableStimulus(TimeSeriesReferenceVectorData): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) name: Literal["stimulus"] = Field("stimulus") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -741,6 +816,17 @@ class IntracellularResponsesTableResponse(TimeSeriesReferenceVectorData): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) name: Literal["response"] = Field("response") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. 
The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -777,9 +863,9 @@ class IntracellularRecordingsTable(AlignedDynamicTable): ..., description="""Table for storing intracellular response related metadata.""", ) - children: Optional[List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable]] = ( - Field(default_factory=dict) - ) + children: Optional[ + List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable] + ] = Field(default_factory=dict) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -805,7 +891,9 @@ class SimultaneousRecordingsTable(DynamicTable): ..., description="""A reference to one or more rows in the IntracellularRecordingsTable table.""", ) - recordings_index: str = Field(..., description="""Index dataset for the recordings column.""") + recordings_index: str = Field( + ..., description="""Index dataset for the recordings column.""" + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", @@ -936,7 +1024,9 @@ class SequentialRecordingsTableSimultaneousRecordingsIndex(VectorIndex): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - name: Literal["simultaneous_recordings_index"] = Field("simultaneous_recordings_index") + name: Literal["simultaneous_recordings_index"] = Field( + "simultaneous_recordings_index" + ) target: Optional[str] = Field( None, description="""Reference to the target dataset that this index applies to.""", @@ -1044,7 +1134,9 @@ class ExperimentalConditionsTable(DynamicTable): ..., description="""A reference to one or more rows in the RepetitionsTable table.""", ) - repetitions_index: str = Field(..., description="""Index dataset for the repetitions column.""") + repetitions_index: str = Field( + ..., description="""Index dataset for the repetitions column.""" + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py index a23bb98..3fe3751 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_image.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + 
Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -25,7 +53,8 @@ if TYPE_CHECKING: import numpy as np -from .core_nwb_base import Image, TimeSeries +from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries + metamodel_version = "None" version = "2.4.0" @@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -74,7 +103,9 @@ class GrayscaleImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -94,7 +125,9 @@ class RGBImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -114,7 +147,9 @@ class RGBAImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ 
-149,7 +184,9 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -215,7 +252,9 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -264,7 +303,9 @@ class OpticalSeries(ImageSeries): data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, 3 r, g, b"], float], - ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") + ] = Field( + ..., description="""Images presented to subject, either grayscale or RGB""" + ) orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""", @@ -280,7 +321,9 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -317,7 +360,9 @@ class IndexSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Index of the frame in the referenced ImageSeries.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_language.py index 4963581..f3b68d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_language.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_language.py @@ -1,20 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if 
sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel): tree_root: bool = False -class Arraylike(ConfiguredBaseModel): - """ - Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. 
- """ - - linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - - # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Arraylike.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py index e51ad29..2de3a45 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_misc.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,12 +54,16 @@ if TYPE_CHECKING: from ...hdmf_common.v1_5_0.hdmf_common_table import ( - DynamicTable, DynamicTableRegion, - VectorData, VectorIndex, + VectorData, + DynamicTable, ) -from .core_nwb_base import TimeSeries + +from .core_nwb_ecephys import ElectrodeGroup + +from .core_nwb_base import TimeSeriesStartingTime, TimeSeries, TimeSeriesSync + metamodel_version = "None" version = "2.4.0" @@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def 
__getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries): ..., description="""Description of the features represented in TimeSeries::data.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries): data: NDArray[Shape["* num_times"], str] = Field( ..., description="""Annotations made during an experiment.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -214,7 +251,9 @@ class DecompositionSeries(TimeSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Data decomposed into frequency bands.""") - metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""") + metric: str = Field( + ..., description="""The metric used, e.g. phase, amplitude, power.""" + ) source_channels: Optional[str] = Field( None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""", @@ -223,7 +262,9 @@ class DecompositionSeries(TimeSeries): ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -261,7 +302,9 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None) + array: Optional[ + NDArray[Shape["* num_times, * num_channels, * num_bands"], float] + ] = Field(None) class DecompositionSeriesSourceChannels(DynamicTableRegion): @@ -331,18 +374,22 @@ class Units(DynamicTable): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Units") spike_times_index: Optional[str] = Field( None, description="""Index into the spike_times dataset.""" ) - spike_times: Optional[str] = Field(None, description="""Spike times for each unit.""") + spike_times: Optional[str] = Field( + None, description="""Spike times for each unit.""" + ) obs_intervals_index: Optional[str] = Field( None, description="""Index into the obs_intervals dataset.""" ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field( - None, description="""Observation intervals for each unit.""" + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = ( + Field(None, description="""Observation intervals for each unit.""") + ) + electrodes_index: Optional[str] = Field( + None, description="""Index into electrodes.""" ) - electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""") electrodes: Optional[str] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", @@ -362,10 +409,14 @@ class Units(DynamicTable): NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] - ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field( - None, - description="""Individual waveforms for each spike on each electrode. 
This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. 
The number of samples for each waveform must be the same.""", + ] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) + waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = ( + Field( + None, + description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. 
The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", + ) ) waveforms_index: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py index 113f15a..333d0e4 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ogen.py @@ -1,25 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -27,8 +56,11 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBContainer, TimeSeries, + TimeSeriesStartingTime, + TimeSeriesSync, ) + metamodel_version = "None" version = "2.4.0" @@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def 
__getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries): data: NDArray[Shape["* num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field( + ..., description="""Excitation wavelength, in nm.""" + ) location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py index 57285d1..0867451 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_ophys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,17 +53,23 @@ if TYPE_CHECKING: import numpy as np +from .core_nwb_image import ImageSeriesExternalFile, ImageSeries + from ...hdmf_common.v1_5_0.hdmf_common_table import ( - DynamicTable, DynamicTableRegion, + VectorData, VectorIndex, + DynamicTable, ) + from .core_nwb_base import ( + TimeSeriesStartingTime, NWBContainer, - NWBDataInterface, TimeSeries, + NWBDataInterface, + TimeSeriesSync, ) -from .core_nwb_image import ImageSeries + metamodel_version = "None" version = "2.4.0" @@ -56,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> 
"np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -114,7 +147,9 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -156,7 +191,9 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -213,7 +250,9 @@ class DfOverF(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
@@ -223,7 +262,9 @@ class Fluorescence(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -233,7 +274,9 @@ class ImageSegmentation(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field(default_factory=dict) + children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field( + default_factory=dict + ) name: str = Field(...) @@ -253,14 +296,18 @@ class PlaneSegmentation(DynamicTable): None, description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""", ) - pixel_mask_index: Optional[str] = Field(None, description="""Index into pixel_mask.""") - pixel_mask: Optional[List[Any] | Any] = Field( - default_factory=list, + pixel_mask_index: Optional[str] = Field( + None, description="""Index into pixel_mask.""" + ) + pixel_mask: Optional[str] = Field( + None, description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) - voxel_mask_index: Optional[str] = Field(None, description="""Index into voxel_mask.""") - voxel_mask: Optional[List[Any] | Any] = Field( - default_factory=list, + voxel_mask_index: Optional[str] = Field( + None, description="""Index into voxel_mask.""" + ) + voxel_mask: Optional[str] = Field( + None, description="""Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) reference_images: Optional[List[ImageSeries] | ImageSeries] = Field( @@ -308,6 +355,29 @@ class PlaneSegmentationPixelMaskIndex(VectorIndex): ] = Field(None) +class PlaneSegmentationPixelMask(VectorData): + """ + Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["pixel_mask"] = Field("pixel_mask") + x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") + y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the pixel.""") + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class PlaneSegmentationVoxelMaskIndex(VectorIndex): """ Index into voxel_mask. @@ -332,13 +402,39 @@ class PlaneSegmentationVoxelMaskIndex(VectorIndex): ] = Field(None) +class PlaneSegmentationVoxelMask(VectorData): + """ + Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["voxel_mask"] = Field("voxel_mask") + x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") + y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") + z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the voxel.""") + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class ImagingPlane(NWBContainer): """ An imaging plane and its metadata. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[OpticalChannel] | OpticalChannel] = Field(default_factory=dict) + children: Optional[List[OpticalChannel] | OpticalChannel] = Field( + default_factory=dict + ) name: str = Field(...) @@ -349,8 +445,12 @@ class OpticalChannel(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") + description: str = Field( + ..., description="""Description or other notes about the channel.""" + ) + emission_lambda: float = Field( + ..., description="""Emission wavelength for channel, in nm.""" + ) class MotionCorrection(NWBDataInterface): @@ -392,7 +492,9 @@ Fluorescence.model_rebuild() ImageSegmentation.model_rebuild() PlaneSegmentation.model_rebuild() PlaneSegmentationPixelMaskIndex.model_rebuild() +PlaneSegmentationPixelMask.model_rebuild() PlaneSegmentationVoxelMaskIndex.model_rebuild() +PlaneSegmentationVoxelMask.model_rebuild() ImagingPlane.model_rebuild() OpticalChannel.model_rebuild() MotionCorrection.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py index dc72d29..0509485 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/core_nwb_retinotopy.py @@ -1,20 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if 
sys.version_info >= (3, 8): from typing import Literal @@ -26,6 +55,7 @@ if TYPE_CHECKING: from .core_nwb_base import NWBDataInterface + metamodel_version = "None" version = "2.4.0" @@ -44,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -69,7 +99,7 @@ class ImagingRetinotopy(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ImagingRetinotopy") axis_1_phase_map: str = Field( ..., description="""Phase response to stimulus on the first measured axis.""" ) @@ -113,7 +143,9 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -131,7 +163,9 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -149,7 +183,9 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -167,7 +203,9 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -189,8 +227,12 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) + focal_depth: Optional[float] = Field( + None, description="""Focal depth offset, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) @@ -208,7 +250,9 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) @@ -227,7 +271,9 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py index c030d63..6e2b7c8 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_4_0/namespace.py @@ -1,23 +1,250 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if 
sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np +from ...hdmf_experimental.v0_1_0.hdmf_experimental_resources import ( + ExternalResources, + ExternalResourcesKeys, + ExternalResourcesEntities, + ExternalResourcesResources, + ExternalResourcesObjects, + ExternalResourcesObjectKeys, +) + +from ...hdmf_common.v1_5_0.hdmf_common_sparse import CSRMatrix + +from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container, SimpleMultiContainer + +from ...hdmf_common.v1_5_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + +from ...hdmf_experimental.v0_1_0.hdmf_experimental_experimental import EnumData + +from .core_nwb_retinotopy import ( + ImagingRetinotopy, + ImagingRetinotopyAxis1PhaseMap, + ImagingRetinotopyAxis1PowerMap, + ImagingRetinotopyAxis2PhaseMap, + ImagingRetinotopyAxis2PowerMap, + ImagingRetinotopyFocalDepthImage, + ImagingRetinotopySignMap, + ImagingRetinotopyVasculatureImage, +) + +from .core_nwb_base import ( + NWBData, + TimeSeriesReferenceVectorData, + Image, + NWBContainer, + NWBDataInterface, + TimeSeries, + TimeSeriesData, + TimeSeriesStartingTime, + TimeSeriesSync, + ProcessingModule, + Images, +) + +from .core_nwb_ophys import ( + TwoPhotonSeries, + RoiResponseSeries, + RoiResponseSeriesRois, + DfOverF, + Fluorescence, + ImageSegmentation, + PlaneSegmentation, + PlaneSegmentationPixelMaskIndex, + PlaneSegmentationPixelMask, + PlaneSegmentationVoxelMaskIndex, + PlaneSegmentationVoxelMask, + ImagingPlane, + OpticalChannel, + MotionCorrection, + CorrectedImageStack, +) + +from .core_nwb_device import Device + +from .core_nwb_image import ( + GrayscaleImage, + RGBImage, + RGBAImage, + ImageSeries, + ImageSeriesExternalFile, + ImageMaskSeries, + OpticalSeries, + IndexSeries, +) + +from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite + 
+from .core_nwb_icephys import ( + PatchClampSeries, + PatchClampSeriesData, + CurrentClampSeries, + CurrentClampSeriesData, + IZeroClampSeries, + CurrentClampStimulusSeries, + CurrentClampStimulusSeriesData, + VoltageClampSeries, + VoltageClampSeriesData, + VoltageClampSeriesCapacitanceFast, + VoltageClampSeriesCapacitanceSlow, + VoltageClampSeriesResistanceCompBandwidth, + VoltageClampSeriesResistanceCompCorrection, + VoltageClampSeriesResistanceCompPrediction, + VoltageClampSeriesWholeCellCapacitanceComp, + VoltageClampSeriesWholeCellSeriesResistanceComp, + VoltageClampStimulusSeries, + VoltageClampStimulusSeriesData, + IntracellularElectrode, + SweepTable, + SweepTableSeriesIndex, + IntracellularElectrodesTable, + IntracellularStimuliTable, + IntracellularStimuliTableStimulus, + IntracellularResponsesTable, + IntracellularResponsesTableResponse, + IntracellularRecordingsTable, + SimultaneousRecordingsTable, + SimultaneousRecordingsTableRecordings, + SimultaneousRecordingsTableRecordingsIndex, + SequentialRecordingsTable, + SequentialRecordingsTableSimultaneousRecordings, + SequentialRecordingsTableSimultaneousRecordingsIndex, + RepetitionsTable, + RepetitionsTableSequentialRecordings, + RepetitionsTableSequentialRecordingsIndex, + ExperimentalConditionsTable, + ExperimentalConditionsTableRepetitions, + ExperimentalConditionsTableRepetitionsIndex, +) + +from .core_nwb_ecephys import ( + ElectricalSeries, + ElectricalSeriesElectrodes, + SpikeEventSeries, + FeatureExtraction, + FeatureExtractionElectrodes, + EventDetection, + EventWaveform, + FilteredEphys, + LFP, + ElectrodeGroup, + ElectrodeGroupPosition, + ClusterWaveforms, + Clustering, +) + +from .core_nwb_behavior import ( + SpatialSeries, + SpatialSeriesData, + BehavioralEpochs, + BehavioralEvents, + BehavioralTimeSeries, + PupilTracking, + EyeTracking, + CompassDirection, + Position, +) + +from .core_nwb_misc import ( + AbstractFeatureSeries, + AbstractFeatureSeriesData, + AnnotationSeries, + 
IntervalSeries, + DecompositionSeries, + DecompositionSeriesData, + DecompositionSeriesSourceChannels, + DecompositionSeriesBands, + Units, + UnitsSpikeTimesIndex, + UnitsSpikeTimes, + UnitsObsIntervalsIndex, + UnitsElectrodesIndex, + UnitsElectrodes, + UnitsWaveformsIndex, + UnitsWaveformsIndexIndex, +) + +from .core_nwb_file import ( + ScratchData, + NWBFile, + NWBFileStimulus, + NWBFileGeneral, + NWBFileGeneralSourceScript, + NWBFileGeneralExtracellularEphys, + NWBFileGeneralExtracellularEphysElectrodes, + NWBFileGeneralIntracellularEphys, + LabMetaData, + Subject, +) + +from .core_nwb_epoch import ( + TimeIntervals, + TimeIntervalsTagsIndex, + TimeIntervalsTimeseries, + TimeIntervalsTimeseriesIndex, +) + + metamodel_version = "None" version = "2.4.0" @@ -36,7 +263,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py index e315cb4..196940c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_base.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, 
+ ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,8 +53,10 @@ if TYPE_CHECKING: import numpy as np +from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable + from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, VectorData + metamodel_version = "None" version = "2.5.0" @@ -47,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -81,7 +110,18 @@ class TimeSeriesReferenceVectorData(VectorData): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("timeseries") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. 
The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -105,7 +145,9 @@ class Image(NWBData): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -153,7 +195,9 @@ class TimeSeries(NWBDataInterface): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -264,7 +308,7 @@ class Images(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("Images") description: Optional[str] = Field( None, description="""Description of this collection of images.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py index 5122d75..59f40b6 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_behavior.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -28,10 +55,14 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBDataInterface, + TimeSeriesStartingTime, + TimeSeriesSync, TimeSeries, ) + from .core_nwb_misc import IntervalSeries + metamodel_version = "None" version = "2.5.0" @@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries): None, description="""Description defining what exactly 
'straight-ahead' means.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -138,7 +171,9 @@ class BehavioralEpochs(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict) + children: Optional[List[IntervalSeries] | IntervalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -178,7 +213,9 @@ class EyeTracking(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -188,7 +225,9 @@ class CompassDirection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -198,7 +237,9 @@ class Position(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_device.py index e4c5d84..c10e955 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_device.py @@ -1,26 +1,61 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np from .core_nwb_base import NWBContainer + metamodel_version = "None" version = "2.5.0" @@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py index 84538ee..801174d 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py +++ 
b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ecephys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,13 +53,17 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTableRegion +from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, DynamicTableRegion + from .core_nwb_base import ( - NWBContainer, + TimeSeriesStartingTime, NWBDataInterface, + TimeSeriesSync, TimeSeries, + NWBContainer, ) + metamodel_version = "None" version = "2.5.0" @@ -51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -94,7 +125,9 @@ class ElectricalSeries(TimeSeries): None, description="""Channel-specific conversion factor. 
Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -172,7 +205,9 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -201,14 +236,16 @@ class FeatureExtraction(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("FeatureExtraction") description: NDArray[Shape["* num_features"], str] = Field( ..., description="""Description of features (eg, ''PC1'') for each of the extracted features.""", ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( - ..., - description="""Multi-dimensional array of features extracted from each event.""", + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = ( + Field( + ..., + description="""Multi-dimensional array of features extracted from each event.""", + ) ) times: NDArray[Shape["* num_events"], float] = Field( ..., @@ -250,7 +287,7 @@ class EventDetection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("EventDetection") detection_method: str = Field( ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", @@ -270,7 +307,9 @@ class EventWaveform(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict) + children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -280,7 +319,9 @@ class FilteredEphys(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -290,7 +331,9 @@ class LFP(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -301,23 +344,37 @@ class ElectrodeGroup(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") + description: Optional[str] = Field( + None, description="""Description of this electrode group.""" + ) location: Optional[str] = Field( None, description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible.""", ) - position: Optional[Any] = Field( + position: Optional[str] = Field( None, description="""stereotaxic or common framework coordinates""" ) +class ElectrodeGroupPosition(ConfiguredBaseModel): + """ + stereotaxic or common framework coordinates + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["position"] = Field("position") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") + + class ClusterWaveforms(NWBDataInterface): """ DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ClusterWaveforms") waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) @@ -337,7 +394,7 @@ class Clustering(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Clustering") description: str = Field( ..., description="""Description of clusters or clustering, (e.g. 
cluster 0 is noise, clusters curated using Klusters, etc)""", @@ -367,5 +424,6 @@ EventWaveform.model_rebuild() FilteredEphys.model_rebuild() LFP.model_rebuild() ElectrodeGroup.model_rebuild() +ElectrodeGroupPosition.model_rebuild() ClusterWaveforms.model_rebuild() Clustering.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py index 3fb3fc4..099a19b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_epoch.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,10 +54,13 @@ if TYPE_CHECKING: from ...hdmf_common.v1_5_0.hdmf_common_table import ( + VectorData, DynamicTable, VectorIndex, ) -from .core_nwb_base import TimeSeriesReferenceVectorData + +from .core_nwb_base import TimeSeriesReferenceVectorData, TimeSeries + metamodel_version = "None" version = "2.5.0" @@ -50,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def 
__getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -87,8 +117,12 @@ class TimeIntervals(DynamicTable): description="""User-defined tags that identify or categorize events.""", ) tags_index: Optional[str] = Field(None, description="""Index for tags.""") - timeseries: Optional[str] = Field(None, description="""An index into a TimeSeries object.""") - timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""") + timeseries: Optional[str] = Field( + None, description="""An index into a TimeSeries object.""" + ) + timeseries_index: Optional[str] = Field( + None, description="""Index for timeseries.""" + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -137,6 +171,17 @@ class TimeIntervalsTimeseries(TimeSeriesReferenceVectorData): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) name: Literal["timeseries"] = Field("timeseries") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. 
The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py index 7f5cacf..87181f0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_file.py @@ -1,23 +1,49 @@ from __future__ import annotations - -import sys -from datetime import datetime +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,20 +53,40 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable +from .core_nwb_misc import Units + +from .core_nwb_icephys import ( + RepetitionsTable, + ExperimentalConditionsTable, + SimultaneousRecordingsTable, + IntracellularRecordingsTable, + SweepTable, + SequentialRecordingsTable, + IntracellularElectrode, +) + +from 
.core_nwb_device import Device + +from .core_nwb_ecephys import ElectrodeGroup + +from .core_nwb_epoch import TimeIntervals + from .core_nwb_base import ( - Images, NWBContainer, NWBData, - NWBDataInterface, - ProcessingModule, TimeSeries, + Images, + ProcessingModule, + NWBDataInterface, ) -from .core_nwb_device import Device -from .core_nwb_epoch import TimeIntervals -from .core_nwb_ogen import OptogeneticStimulusSite + from .core_nwb_ophys import ImagingPlane +from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable + +from .core_nwb_ogen import OptogeneticStimulusSite + + metamodel_version = "None" version = "2.5.0" @@ -59,7 +105,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -171,9 +217,11 @@ class NWBFileStimulus(ConfiguredBaseModel): presentation: Optional[List[TimeSeries] | TimeSeries] = Field( default_factory=dict, description="""Stimuli presented during the experiment.""" ) - templates: Optional[List[Union[Images, TimeSeries]] | Union[Images, TimeSeries]] = Field( - default_factory=dict, - description="""Template stimuli. Timestamps in templates are based on stimulus design and are relative to the beginning of the stimulus. When templates are used, the stimulus instances must convert presentation times to the experiment`s time reference frame.""", + templates: Optional[List[Union[Images, TimeSeries]] | Union[Images, TimeSeries]] = ( + Field( + default_factory=dict, + description="""Template stimuli. Timestamps in templates are based on stimulus design and are relative to the beginning of the stimulus. 
When templates are used, the stimulus instances must convert presentation times to the experiment`s time reference frame.""", + ) ) @@ -200,7 +248,9 @@ class NWBFileGeneral(ConfiguredBaseModel): keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field( None, description="""Terms to search over.""" ) - lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""") + lab: Optional[str] = Field( + None, description="""Laboratory where experiment was performed.""" + ) notes: Optional[str] = Field(None, description="""Notes about the experiment.""") pharmacology: Optional[str] = Field( None, @@ -213,7 +263,9 @@ class NWBFileGeneral(ConfiguredBaseModel): related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field( None, description="""Publication information. PMID, DOI, URL, etc.""" ) - session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""") + session_id: Optional[str] = Field( + None, description="""Lab-specific ID for the session.""" + ) slices: Optional[str] = Field( None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", @@ -252,9 +304,11 @@ class NWBFileGeneral(ConfiguredBaseModel): intracellular_ephys: Optional[str] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) - optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field( - default_factory=dict, - description="""Metadata describing optogenetic stimuluation.""", + optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = ( + Field( + default_factory=dict, + description="""Metadata describing optogenetic stimuluation.""", + ) ) optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field( default_factory=dict, description="""Metadata related to optophysiology.""" diff --git 
a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py index 399a503..4e6e0f1 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_icephys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,17 +54,22 @@ if TYPE_CHECKING: from ...hdmf_common.v1_5_0.hdmf_common_table import ( - AlignedDynamicTable, - DynamicTable, DynamicTableRegion, + AlignedDynamicTable, VectorIndex, + VectorData, + DynamicTable, ) + from .core_nwb_base import ( + TimeSeriesStartingTime, + TimeSeriesReferenceVectorData, NWBContainer, TimeSeries, - TimeSeriesReferenceVectorData, + TimeSeriesSync, ) + metamodel_version = "None" version = "2.5.0" @@ -56,7 +88,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -94,7 +126,9 @@ class PatchClampSeries(TimeSeries): None, 
description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -143,8 +177,12 @@ class CurrentClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Recorded voltage.""") - bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + bias_current: Optional[float] = Field( + None, description="""Bias current, in amps.""" + ) + bridge_balance: Optional[float] = Field( + None, description="""Bridge balance, in ohms.""" + ) capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) @@ -159,7 +197,9 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -211,8 +251,12 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""", ) - bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + bias_current: float = Field( + ..., description="""Bias current, in amps, fixed to 0.0.""" + ) + bridge_balance: float = Field( + ..., description="""Bridge balance, in ohms, fixed to 0.0.""" + ) capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) @@ -225,7 +269,9 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -271,7 +317,9 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -320,8 +368,12 @@ class VoltageClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Recorded current.""") - capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""") - capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""") + capacitance_fast: Optional[str] = Field( + None, description="""Fast capacitance, in farads.""" + ) + capacitance_slow: Optional[str] = Field( + None, description="""Slow capacitance, in farads.""" + ) resistance_comp_bandwidth: Optional[str] = Field( None, description="""Resistance compensation bandwidth, in hertz.""" ) @@ -348,7 +400,9 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -479,7 +533,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp") + name: Literal["whole_cell_series_resistance_comp"] = Field( + "whole_cell_series_resistance_comp" + ) unit: Optional[str] = Field( None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", @@ -506,7 +562,9 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -559,7 +617,9 @@ class IntracellularElectrode(NWBContainer): ..., description="""Description of electrode (e.g., whole-cell, sharp, etc.).""", ) - filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""") + filtering: Optional[str] = Field( + None, description="""Electrode specific filtering.""" + ) initial_access_resistance: Optional[str] = Field( None, description="""Initial access resistance.""" ) @@ -567,8 +627,12 @@ class IntracellularElectrode(NWBContainer): None, description="""Location of the electrode. 
Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", ) - resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""") - seal: Optional[str] = Field(None, description="""Information about seal used for recording.""") + resistance: Optional[str] = Field( + None, description="""Electrode resistance, in ohms.""" + ) + seal: Optional[str] = Field( + None, description="""Information about seal used for recording.""" + ) slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) @@ -694,6 +758,17 @@ class IntracellularStimuliTableStimulus(TimeSeriesReferenceVectorData): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) name: Literal["stimulus"] = Field("stimulus") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -742,6 +817,17 @@ class IntracellularResponsesTableResponse(TimeSeriesReferenceVectorData): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) name: Literal["response"] = Field("response") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. 
The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -778,9 +864,9 @@ class IntracellularRecordingsTable(AlignedDynamicTable): ..., description="""Table for storing intracellular response related metadata.""", ) - children: Optional[List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable]] = ( - Field(default_factory=dict) - ) + children: Optional[ + List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable] + ] = Field(default_factory=dict) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -806,7 +892,9 @@ class SimultaneousRecordingsTable(DynamicTable): ..., description="""A reference to one or more rows in the IntracellularRecordingsTable table.""", ) - recordings_index: str = Field(..., description="""Index dataset for the recordings column.""") + recordings_index: str = Field( + ..., description="""Index dataset for the recordings column.""" + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", @@ -937,7 +1025,9 @@ class SequentialRecordingsTableSimultaneousRecordingsIndex(VectorIndex): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - name: Literal["simultaneous_recordings_index"] = Field("simultaneous_recordings_index") + name: Literal["simultaneous_recordings_index"] = Field( + "simultaneous_recordings_index" + ) target: Optional[str] = Field( None, description="""Reference to the target dataset that this index applies to.""", @@ -1045,7 +1135,9 @@ class ExperimentalConditionsTable(DynamicTable): ..., description="""A reference to one or more rows in the RepetitionsTable table.""", ) - repetitions_index: str = Field(..., description="""Index dataset for the repetitions column.""") + repetitions_index: str = Field( + ..., description="""Index dataset for the repetitions column.""" + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py index 2e046c9..9ed4d15 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_image.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + 
Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -25,7 +53,8 @@ if TYPE_CHECKING: import numpy as np -from .core_nwb_base import Image, TimeSeries +from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries + metamodel_version = "None" version = "2.5.0" @@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -74,7 +103,9 @@ class GrayscaleImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -94,7 +125,9 @@ class RGBImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -114,7 +147,9 @@ class RGBAImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ 
-149,7 +184,9 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -215,7 +252,9 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -264,7 +303,9 @@ class OpticalSeries(ImageSeries): data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, 3 r, g, b"], float], - ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") + ] = Field( + ..., description="""Images presented to subject, either grayscale or RGB""" + ) orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""", @@ -280,7 +321,9 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -318,7 +361,9 @@ class IndexSeries(TimeSeries): ..., description="""Index of the image (using zero-indexing) in the linked Images object.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_language.py index 4963581..f3b68d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_language.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_language.py @@ -1,20 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + 
from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel): tree_root: bool = False -class Arraylike(ConfiguredBaseModel): - """ - Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. - """ - - linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - - # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Arraylike.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py index 76411c2..a3b4271 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_misc.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + 
Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,12 +54,16 @@ if TYPE_CHECKING: from ...hdmf_common.v1_5_0.hdmf_common_table import ( - DynamicTable, DynamicTableRegion, - VectorData, VectorIndex, + VectorData, + DynamicTable, ) -from .core_nwb_base import TimeSeries + +from .core_nwb_ecephys import ElectrodeGroup + +from .core_nwb_base import TimeSeriesStartingTime, TimeSeries, TimeSeriesSync + metamodel_version = "None" version = "2.5.0" @@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries): ..., description="""Description of the features represented in TimeSeries::data.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries): data: NDArray[Shape["* num_times"], str] = Field( ..., description="""Annotations made during an experiment.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -214,7 +251,9 @@ class DecompositionSeries(TimeSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Data decomposed into frequency bands.""") - metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""") + metric: str = Field( + ..., description="""The metric used, e.g. 
phase, amplitude, power.""" + ) source_channels: Optional[str] = Field( None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""", @@ -223,7 +262,9 @@ class DecompositionSeries(TimeSeries): ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -261,7 +302,9 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None) + array: Optional[ + NDArray[Shape["* num_times, * num_channels, * num_bands"], float] + ] = Field(None) class DecompositionSeriesSourceChannels(DynamicTableRegion): @@ -331,18 +374,22 @@ class Units(DynamicTable): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("Units") spike_times_index: Optional[str] = Field( None, description="""Index into the spike_times dataset.""" ) - spike_times: Optional[str] = Field(None, description="""Spike times for each unit.""") + spike_times: Optional[str] = Field( + None, description="""Spike times for each unit.""" + ) obs_intervals_index: Optional[str] = Field( None, description="""Index into the obs_intervals dataset.""" ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field( - None, description="""Observation intervals for each unit.""" + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = ( + Field(None, description="""Observation intervals for each unit.""") + ) + electrodes_index: Optional[str] = Field( + None, description="""Index into electrodes.""" ) - electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""") electrodes: Optional[str] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", @@ -362,10 +409,14 @@ class Units(DynamicTable): NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] - ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field( - None, - description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. 
For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", + ] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) + waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = ( + Field( + None, + description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. 
The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. 
The number of samples for each waveform must be the same.""", + ) ) waveforms_index: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py index 265fac3..ae74333 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ogen.py @@ -1,25 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -27,8 +56,11 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBContainer, TimeSeries, + TimeSeriesStartingTime, + TimeSeriesSync, ) + metamodel_version = "None" version = "2.5.0" @@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries): data: NDArray[Shape["* num_times"], float] = Field( 
..., description="""Applied power for optogenetic stimulus, in watts.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field( + ..., description="""Excitation wavelength, in nm.""" + ) location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py index fd98314..4f7c349 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_ophys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,17 +53,23 @@ if TYPE_CHECKING: import numpy as np +from .core_nwb_image import ImageSeriesExternalFile, ImageSeries + from ...hdmf_common.v1_5_0.hdmf_common_table import ( - DynamicTable, DynamicTableRegion, + VectorData, VectorIndex, + DynamicTable, ) + from .core_nwb_base import ( + TimeSeriesStartingTime, NWBContainer, - NWBDataInterface, TimeSeries, + NWBDataInterface, + TimeSeriesSync, ) -from .core_nwb_image import ImageSeries + metamodel_version = "None" version = "2.5.0" @@ -56,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> 
"np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -114,7 +147,9 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -156,7 +191,9 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -213,7 +250,9 @@ class DfOverF(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
@@ -223,7 +262,9 @@ class Fluorescence(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -233,7 +274,9 @@ class ImageSegmentation(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field(default_factory=dict) + children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field( + default_factory=dict + ) name: str = Field(...) @@ -253,14 +296,18 @@ class PlaneSegmentation(DynamicTable): None, description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""", ) - pixel_mask_index: Optional[str] = Field(None, description="""Index into pixel_mask.""") - pixel_mask: Optional[List[Any] | Any] = Field( - default_factory=list, + pixel_mask_index: Optional[str] = Field( + None, description="""Index into pixel_mask.""" + ) + pixel_mask: Optional[str] = Field( + None, description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) - voxel_mask_index: Optional[str] = Field(None, description="""Index into voxel_mask.""") - voxel_mask: Optional[List[Any] | Any] = Field( - default_factory=list, + voxel_mask_index: Optional[str] = Field( + None, description="""Index into voxel_mask.""" + ) + voxel_mask: Optional[str] = Field( + None, description="""Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) reference_images: Optional[List[ImageSeries] | ImageSeries] = Field( @@ -308,6 +355,29 @@ class PlaneSegmentationPixelMaskIndex(VectorIndex): ] = Field(None) +class PlaneSegmentationPixelMask(VectorData): + """ + Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["pixel_mask"] = Field("pixel_mask") + x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") + y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the pixel.""") + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class PlaneSegmentationVoxelMaskIndex(VectorIndex): """ Index into voxel_mask. @@ -332,13 +402,39 @@ class PlaneSegmentationVoxelMaskIndex(VectorIndex): ] = Field(None) +class PlaneSegmentationVoxelMask(VectorData): + """ + Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["voxel_mask"] = Field("voxel_mask") + x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") + y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") + z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the voxel.""") + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class ImagingPlane(NWBContainer): """ An imaging plane and its metadata. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[OpticalChannel] | OpticalChannel] = Field(default_factory=dict) + children: Optional[List[OpticalChannel] | OpticalChannel] = Field( + default_factory=dict + ) name: str = Field(...) @@ -349,8 +445,12 @@ class OpticalChannel(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") + description: str = Field( + ..., description="""Description or other notes about the channel.""" + ) + emission_lambda: float = Field( + ..., description="""Emission wavelength for channel, in nm.""" + ) class MotionCorrection(NWBDataInterface): @@ -392,7 +492,9 @@ Fluorescence.model_rebuild() ImageSegmentation.model_rebuild() PlaneSegmentation.model_rebuild() PlaneSegmentationPixelMaskIndex.model_rebuild() +PlaneSegmentationPixelMask.model_rebuild() PlaneSegmentationVoxelMaskIndex.model_rebuild() +PlaneSegmentationVoxelMask.model_rebuild() ImagingPlane.model_rebuild() OpticalChannel.model_rebuild() MotionCorrection.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py index 019640d..094eb44 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/core_nwb_retinotopy.py @@ -1,20 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if 
sys.version_info >= (3, 8): from typing import Literal @@ -26,6 +55,7 @@ if TYPE_CHECKING: from .core_nwb_base import NWBDataInterface + metamodel_version = "None" version = "2.5.0" @@ -44,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -69,7 +99,7 @@ class ImagingRetinotopy(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ImagingRetinotopy") axis_1_phase_map: str = Field( ..., description="""Phase response to stimulus on the first measured axis.""" ) @@ -113,7 +143,9 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -131,7 +163,9 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -149,7 +183,9 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -167,7 +203,9 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -189,8 +227,12 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) + focal_depth: Optional[float] = Field( + None, description="""Focal depth offset, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) @@ -208,7 +250,9 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) @@ -227,7 +271,9 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py index 8f8d55f..6146cc5 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_5_0/namespace.py @@ -1,23 +1,252 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if 
sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np +from ...hdmf_experimental.v0_1_0.hdmf_experimental_resources import ( + ExternalResources, + ExternalResourcesKeys, + ExternalResourcesEntities, + ExternalResourcesResources, + ExternalResourcesObjects, + ExternalResourcesObjectKeys, +) + +from ...hdmf_common.v1_5_0.hdmf_common_sparse import CSRMatrix + +from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container, SimpleMultiContainer + +from ...hdmf_common.v1_5_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + +from ...hdmf_experimental.v0_1_0.hdmf_experimental_experimental import EnumData + +from .core_nwb_retinotopy import ( + ImagingRetinotopy, + ImagingRetinotopyAxis1PhaseMap, + ImagingRetinotopyAxis1PowerMap, + ImagingRetinotopyAxis2PhaseMap, + ImagingRetinotopyAxis2PowerMap, + ImagingRetinotopyFocalDepthImage, + ImagingRetinotopySignMap, + ImagingRetinotopyVasculatureImage, +) + +from .core_nwb_base import ( + NWBData, + TimeSeriesReferenceVectorData, + Image, + ImageReferences, + NWBContainer, + NWBDataInterface, + TimeSeries, + TimeSeriesData, + TimeSeriesStartingTime, + TimeSeriesSync, + ProcessingModule, + Images, + ImagesOrderOfImages, +) + +from .core_nwb_ophys import ( + TwoPhotonSeries, + RoiResponseSeries, + RoiResponseSeriesRois, + DfOverF, + Fluorescence, + ImageSegmentation, + PlaneSegmentation, + PlaneSegmentationPixelMaskIndex, + PlaneSegmentationPixelMask, + PlaneSegmentationVoxelMaskIndex, + PlaneSegmentationVoxelMask, + ImagingPlane, + OpticalChannel, + MotionCorrection, + CorrectedImageStack, +) + +from .core_nwb_device import Device + +from .core_nwb_image import ( + GrayscaleImage, + RGBImage, + RGBAImage, + ImageSeries, + ImageSeriesExternalFile, + ImageMaskSeries, + OpticalSeries, + IndexSeries, +) + +from .core_nwb_ogen import 
OptogeneticSeries, OptogeneticStimulusSite + +from .core_nwb_icephys import ( + PatchClampSeries, + PatchClampSeriesData, + CurrentClampSeries, + CurrentClampSeriesData, + IZeroClampSeries, + CurrentClampStimulusSeries, + CurrentClampStimulusSeriesData, + VoltageClampSeries, + VoltageClampSeriesData, + VoltageClampSeriesCapacitanceFast, + VoltageClampSeriesCapacitanceSlow, + VoltageClampSeriesResistanceCompBandwidth, + VoltageClampSeriesResistanceCompCorrection, + VoltageClampSeriesResistanceCompPrediction, + VoltageClampSeriesWholeCellCapacitanceComp, + VoltageClampSeriesWholeCellSeriesResistanceComp, + VoltageClampStimulusSeries, + VoltageClampStimulusSeriesData, + IntracellularElectrode, + SweepTable, + SweepTableSeriesIndex, + IntracellularElectrodesTable, + IntracellularStimuliTable, + IntracellularStimuliTableStimulus, + IntracellularResponsesTable, + IntracellularResponsesTableResponse, + IntracellularRecordingsTable, + SimultaneousRecordingsTable, + SimultaneousRecordingsTableRecordings, + SimultaneousRecordingsTableRecordingsIndex, + SequentialRecordingsTable, + SequentialRecordingsTableSimultaneousRecordings, + SequentialRecordingsTableSimultaneousRecordingsIndex, + RepetitionsTable, + RepetitionsTableSequentialRecordings, + RepetitionsTableSequentialRecordingsIndex, + ExperimentalConditionsTable, + ExperimentalConditionsTableRepetitions, + ExperimentalConditionsTableRepetitionsIndex, +) + +from .core_nwb_ecephys import ( + ElectricalSeries, + ElectricalSeriesElectrodes, + SpikeEventSeries, + FeatureExtraction, + FeatureExtractionElectrodes, + EventDetection, + EventWaveform, + FilteredEphys, + LFP, + ElectrodeGroup, + ElectrodeGroupPosition, + ClusterWaveforms, + Clustering, +) + +from .core_nwb_behavior import ( + SpatialSeries, + SpatialSeriesData, + BehavioralEpochs, + BehavioralEvents, + BehavioralTimeSeries, + PupilTracking, + EyeTracking, + CompassDirection, + Position, +) + +from .core_nwb_misc import ( + AbstractFeatureSeries, + 
AbstractFeatureSeriesData, + AnnotationSeries, + IntervalSeries, + DecompositionSeries, + DecompositionSeriesData, + DecompositionSeriesSourceChannels, + DecompositionSeriesBands, + Units, + UnitsSpikeTimesIndex, + UnitsSpikeTimes, + UnitsObsIntervalsIndex, + UnitsElectrodesIndex, + UnitsElectrodes, + UnitsWaveformsIndex, + UnitsWaveformsIndexIndex, +) + +from .core_nwb_file import ( + ScratchData, + NWBFile, + NWBFileStimulus, + NWBFileGeneral, + NWBFileGeneralSourceScript, + NWBFileGeneralExtracellularEphys, + NWBFileGeneralExtracellularEphysElectrodes, + NWBFileGeneralIntracellularEphys, + LabMetaData, + Subject, +) + +from .core_nwb_epoch import ( + TimeIntervals, + TimeIntervalsTagsIndex, + TimeIntervalsTimeseries, + TimeIntervalsTimeseriesIndex, +) + + metamodel_version = "None" version = "2.5.0" @@ -36,7 +265,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py index acc6bf8..79f6947 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_base.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum 
from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,8 +53,10 @@ if TYPE_CHECKING: import numpy as np +from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable + from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, VectorData + metamodel_version = "None" version = "2.6.0-alpha" @@ -47,7 +76,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -81,7 +110,18 @@ class TimeSeriesReferenceVectorData(VectorData): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("timeseries") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. 
The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -105,7 +145,9 @@ class Image(NWBData): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -153,7 +195,9 @@ class TimeSeries(NWBDataInterface): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -264,7 +308,7 @@ class Images(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("Images") description: Optional[str] = Field( None, description="""Description of this collection of images.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py index 85bd347..0f30adc 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_behavior.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -28,10 +55,14 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBDataInterface, + TimeSeriesStartingTime, + TimeSeriesSync, TimeSeries, ) + from .core_nwb_misc import IntervalSeries + metamodel_version = "None" version = "2.6.0-alpha" @@ -50,7 +81,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -84,7 +115,9 @@ class SpatialSeries(TimeSeries): None, description="""Description 
defining what exactly 'straight-ahead' means.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -138,7 +171,9 @@ class BehavioralEpochs(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[IntervalSeries] | IntervalSeries] = Field(default_factory=dict) + children: Optional[List[IntervalSeries] | IntervalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -178,7 +213,9 @@ class EyeTracking(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -188,7 +225,9 @@ class CompassDirection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -198,7 +237,9 @@ class Position(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpatialSeries] | SpatialSeries] = Field(default_factory=dict) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py index f835619..066ae88 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_device.py @@ -1,26 +1,61 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np from .core_nwb_base import NWBContainer + metamodel_version = "None" version = "2.6.0-alpha" @@ -39,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py index 06000ff..05299ef 100644 --- 
a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ecephys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,13 +53,17 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTableRegion +from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable, DynamicTableRegion + from .core_nwb_base import ( - NWBContainer, + TimeSeriesStartingTime, NWBDataInterface, + TimeSeriesSync, TimeSeries, + NWBContainer, ) + metamodel_version = "None" version = "2.6.0-alpha" @@ -51,7 +82,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -94,7 +125,9 @@ class ElectricalSeries(TimeSeries): None, description="""Channel-specific conversion factor. 
Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -172,7 +205,9 @@ class SpikeEventSeries(ElectricalSeries): None, description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. 
it is 1 for all channels.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -201,14 +236,16 @@ class FeatureExtraction(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("FeatureExtraction") description: NDArray[Shape["* num_features"], str] = Field( ..., description="""Description of features (eg, ''PC1'') for each of the extracted features.""", ) - features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = Field( - ..., - description="""Multi-dimensional array of features extracted from each event.""", + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = ( + Field( + ..., + description="""Multi-dimensional array of features extracted from each event.""", + ) ) times: NDArray[Shape["* num_events"], float] = Field( ..., @@ -250,7 +287,7 @@ class EventDetection(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("EventDetection") detection_method: str = Field( ..., description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", @@ -270,7 +307,9 @@ class EventWaveform(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field(default_factory=dict) + children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -280,7 +319,9 @@ class FilteredEphys(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -290,7 +331,9 @@ class LFP(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field(default_factory=dict) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -301,23 +344,37 @@ class ElectrodeGroup(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - description: Optional[str] = Field(None, description="""Description of this electrode group.""") + description: Optional[str] = Field( + None, description="""Description of this electrode group.""" + ) location: Optional[str] = Field( None, description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible.""", ) - position: Optional[Any] = Field( + position: Optional[str] = Field( None, description="""stereotaxic or common framework coordinates""" ) +class ElectrodeGroupPosition(ConfiguredBaseModel): + """ + stereotaxic or common framework coordinates + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["position"] = Field("position") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") + + class ClusterWaveforms(NWBDataInterface): """ DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ClusterWaveforms") waveform_filtering: str = Field( ..., description="""Filtering applied to data before generating mean/sd""" ) @@ -337,7 +394,7 @@ class Clustering(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Clustering") description: str = Field( ..., description="""Description of clusters or clustering, (e.g. 
cluster 0 is noise, clusters curated using Klusters, etc)""", @@ -367,5 +424,6 @@ EventWaveform.model_rebuild() FilteredEphys.model_rebuild() LFP.model_rebuild() ElectrodeGroup.model_rebuild() +ElectrodeGroupPosition.model_rebuild() ClusterWaveforms.model_rebuild() Clustering.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py index a94477b..ad88cf7 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_epoch.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,10 +54,13 @@ if TYPE_CHECKING: from ...hdmf_common.v1_5_0.hdmf_common_table import ( + VectorData, DynamicTable, VectorIndex, ) -from .core_nwb_base import TimeSeriesReferenceVectorData + +from .core_nwb_base import TimeSeriesReferenceVectorData, TimeSeries + metamodel_version = "None" version = "2.6.0-alpha" @@ -50,7 +80,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: 
slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -87,8 +117,12 @@ class TimeIntervals(DynamicTable): description="""User-defined tags that identify or categorize events.""", ) tags_index: Optional[str] = Field(None, description="""Index for tags.""") - timeseries: Optional[str] = Field(None, description="""An index into a TimeSeries object.""") - timeseries_index: Optional[str] = Field(None, description="""Index for timeseries.""") + timeseries: Optional[str] = Field( + None, description="""An index into a TimeSeries object.""" + ) + timeseries_index: Optional[str] = Field( + None, description="""Index for timeseries.""" + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -137,6 +171,17 @@ class TimeIntervalsTimeseries(TimeSeriesReferenceVectorData): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) name: Literal["timeseries"] = Field("timeseries") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. 
The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py index 5ef83cc..4041fbb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_file.py @@ -1,23 +1,49 @@ from __future__ import annotations - -import sys -from datetime import datetime +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,20 +53,40 @@ if TYPE_CHECKING: import numpy as np -from ...hdmf_common.v1_5_0.hdmf_common_table import DynamicTable +from .core_nwb_misc import Units + +from .core_nwb_icephys import ( + RepetitionsTable, + ExperimentalConditionsTable, + SimultaneousRecordingsTable, + IntracellularRecordingsTable, + SweepTable, + SequentialRecordingsTable, + 
IntracellularElectrode, +) + +from .core_nwb_device import Device + +from .core_nwb_ecephys import ElectrodeGroup + +from .core_nwb_epoch import TimeIntervals + from .core_nwb_base import ( - Images, NWBContainer, NWBData, - NWBDataInterface, - ProcessingModule, TimeSeries, + Images, + ProcessingModule, + NWBDataInterface, ) -from .core_nwb_device import Device -from .core_nwb_epoch import TimeIntervals -from .core_nwb_ogen import OptogeneticStimulusSite + from .core_nwb_ophys import ImagingPlane +from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData, DynamicTable + +from .core_nwb_ogen import OptogeneticStimulusSite + + metamodel_version = "None" version = "2.6.0-alpha" @@ -59,7 +105,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -171,9 +217,11 @@ class NWBFileStimulus(ConfiguredBaseModel): presentation: Optional[List[TimeSeries] | TimeSeries] = Field( default_factory=dict, description="""Stimuli presented during the experiment.""" ) - templates: Optional[List[Union[Images, TimeSeries]] | Union[Images, TimeSeries]] = Field( - default_factory=dict, - description="""Template stimuli. Timestamps in templates are based on stimulus design and are relative to the beginning of the stimulus. When templates are used, the stimulus instances must convert presentation times to the experiment`s time reference frame.""", + templates: Optional[List[Union[Images, TimeSeries]] | Union[Images, TimeSeries]] = ( + Field( + default_factory=dict, + description="""Template stimuli. Timestamps in templates are based on stimulus design and are relative to the beginning of the stimulus. 
When templates are used, the stimulus instances must convert presentation times to the experiment`s time reference frame.""", + ) ) @@ -200,7 +248,9 @@ class NWBFileGeneral(ConfiguredBaseModel): keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field( None, description="""Terms to search over.""" ) - lab: Optional[str] = Field(None, description="""Laboratory where experiment was performed.""") + lab: Optional[str] = Field( + None, description="""Laboratory where experiment was performed.""" + ) notes: Optional[str] = Field(None, description="""Notes about the experiment.""") pharmacology: Optional[str] = Field( None, @@ -213,7 +263,9 @@ class NWBFileGeneral(ConfiguredBaseModel): related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field( None, description="""Publication information. PMID, DOI, URL, etc.""" ) - session_id: Optional[str] = Field(None, description="""Lab-specific ID for the session.""") + session_id: Optional[str] = Field( + None, description="""Lab-specific ID for the session.""" + ) slices: Optional[str] = Field( None, description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", @@ -252,9 +304,11 @@ class NWBFileGeneral(ConfiguredBaseModel): intracellular_ephys: Optional[str] = Field( None, description="""Metadata related to intracellular electrophysiology.""" ) - optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = Field( - default_factory=dict, - description="""Metadata describing optogenetic stimuluation.""", + optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = ( + Field( + default_factory=dict, + description="""Metadata describing optogenetic stimuluation.""", + ) ) optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field( default_factory=dict, description="""Metadata related to optophysiology.""" diff --git 
a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py index 2a8c8fc..0d69cde 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_icephys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,17 +54,22 @@ if TYPE_CHECKING: from ...hdmf_common.v1_5_0.hdmf_common_table import ( - AlignedDynamicTable, - DynamicTable, DynamicTableRegion, + AlignedDynamicTable, VectorIndex, + VectorData, + DynamicTable, ) + from .core_nwb_base import ( + TimeSeriesStartingTime, + TimeSeriesReferenceVectorData, NWBContainer, TimeSeries, - TimeSeriesReferenceVectorData, + TimeSeriesSync, ) + metamodel_version = "None" version = "2.6.0-alpha" @@ -56,7 +88,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -94,7 +126,9 @@ class 
PatchClampSeries(TimeSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -143,8 +177,12 @@ class CurrentClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Recorded voltage.""") - bias_current: Optional[float] = Field(None, description="""Bias current, in amps.""") - bridge_balance: Optional[float] = Field(None, description="""Bridge balance, in ohms.""") + bias_current: Optional[float] = Field( + None, description="""Bias current, in amps.""" + ) + bridge_balance: Optional[float] = Field( + None, description="""Bridge balance, in ohms.""" + ) capacitance_compensation: Optional[float] = Field( None, description="""Capacitance compensation, in farads.""" ) @@ -159,7 +197,9 @@ class CurrentClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -211,8 +251,12 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""", ) - bias_current: float = Field(..., description="""Bias current, in amps, fixed to 0.0.""") - bridge_balance: float = Field(..., description="""Bridge balance, in ohms, fixed to 0.0.""") + bias_current: float = Field( + ..., description="""Bias current, in amps, fixed to 0.0.""" + ) + bridge_balance: float = Field( + ..., description="""Bridge balance, in ohms, fixed to 0.0.""" + ) capacitance_compensation: float = Field( ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" ) @@ -225,7 +269,9 @@ class IZeroClampSeries(CurrentClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -271,7 +317,9 @@ class CurrentClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -320,8 +368,12 @@ class VoltageClampSeries(PatchClampSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Recorded current.""") - capacitance_fast: Optional[str] = Field(None, description="""Fast capacitance, in farads.""") - capacitance_slow: Optional[str] = Field(None, description="""Slow capacitance, in farads.""") + capacitance_fast: Optional[str] = Field( + None, description="""Fast capacitance, in farads.""" + ) + capacitance_slow: Optional[str] = Field( + None, description="""Slow capacitance, in farads.""" + ) resistance_comp_bandwidth: Optional[str] = Field( None, description="""Resistance compensation bandwidth, in hertz.""" ) @@ -348,7 +400,9 @@ class VoltageClampSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -479,7 +533,9 @@ class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - name: Literal["whole_cell_series_resistance_comp"] = Field("whole_cell_series_resistance_comp") + name: Literal["whole_cell_series_resistance_comp"] = Field( + "whole_cell_series_resistance_comp" + ) unit: Optional[str] = Field( None, description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", @@ -506,7 +562,9 @@ class VoltageClampStimulusSeries(PatchClampSeries): None, description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -559,7 +617,9 @@ class IntracellularElectrode(NWBContainer): ..., description="""Description of electrode (e.g., whole-cell, sharp, etc.).""", ) - filtering: Optional[str] = Field(None, description="""Electrode specific filtering.""") + filtering: Optional[str] = Field( + None, description="""Electrode specific filtering.""" + ) initial_access_resistance: Optional[str] = Field( None, description="""Initial access resistance.""" ) @@ -567,8 +627,12 @@ class IntracellularElectrode(NWBContainer): None, description="""Location of the electrode. 
Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", ) - resistance: Optional[str] = Field(None, description="""Electrode resistance, in ohms.""") - seal: Optional[str] = Field(None, description="""Information about seal used for recording.""") + resistance: Optional[str] = Field( + None, description="""Electrode resistance, in ohms.""" + ) + seal: Optional[str] = Field( + None, description="""Information about seal used for recording.""" + ) slice: Optional[str] = Field( None, description="""Information about slice used for recording.""" ) @@ -694,6 +758,17 @@ class IntracellularStimuliTableStimulus(TimeSeriesReferenceVectorData): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) name: Literal["stimulus"] = Field("stimulus") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -742,6 +817,17 @@ class IntracellularResponsesTableResponse(TimeSeriesReferenceVectorData): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) name: Literal["response"] = Field("response") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. 
The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) description: Optional[str] = Field( None, description="""Description of what these vectors represent.""" ) @@ -778,9 +864,9 @@ class IntracellularRecordingsTable(AlignedDynamicTable): ..., description="""Table for storing intracellular response related metadata.""", ) - children: Optional[List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable]] = ( - Field(default_factory=dict) - ) + children: Optional[ + List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable] + ] = Field(default_factory=dict) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", @@ -806,7 +892,9 @@ class SimultaneousRecordingsTable(DynamicTable): ..., description="""A reference to one or more rows in the IntracellularRecordingsTable table.""", ) - recordings_index: str = Field(..., description="""Index dataset for the recordings column.""") + recordings_index: str = Field( + ..., description="""Index dataset for the recordings column.""" + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", @@ -937,7 +1025,9 @@ class SequentialRecordingsTableSimultaneousRecordingsIndex(VectorIndex): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - name: Literal["simultaneous_recordings_index"] = Field("simultaneous_recordings_index") + name: Literal["simultaneous_recordings_index"] = Field( + "simultaneous_recordings_index" + ) target: Optional[str] = Field( None, description="""Reference to the target dataset that this index applies to.""", @@ -1045,7 +1135,9 @@ class ExperimentalConditionsTable(DynamicTable): ..., description="""A reference to one or more rows in the RepetitionsTable table.""", ) - repetitions_index: str = Field(..., description="""Index dataset for the repetitions column.""") + repetitions_index: str = Field( + ..., description="""Index dataset for the repetitions column.""" + ) colnames: Optional[str] = Field( None, description="""The names of the columns in this table. This should be used to specify an order to the columns.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py index 6e89b31..f91e99b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_image.py @@ -1,21 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, 
+ Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -25,7 +53,8 @@ if TYPE_CHECKING: import numpy as np -from .core_nwb_base import Image, TimeSeries +from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries + metamodel_version = "None" version = "2.6.0-alpha" @@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -74,7 +103,9 @@ class GrayscaleImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -94,7 +125,9 @@ class RGBImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ NDArray[Shape["* x, * y"], float], @@ -114,7 +147,9 @@ class RGBAImage(Image): resolution: Optional[float] = Field( None, description="""Pixel resolution of the image, in pixels per centimeter.""" ) - description: Optional[str] = Field(None, description="""Description of the image.""") + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) array: Optional[ Union[ 
NDArray[Shape["* x, * y"], float], @@ -149,7 +184,9 @@ class ImageSeries(TimeSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -215,7 +252,9 @@ class ImageMaskSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -264,7 +303,9 @@ class OpticalSeries(ImageSeries): data: Union[ NDArray[Shape["* frame, * x, * y"], float], NDArray[Shape["* frame, * x, * y, 3 r, g, b"], float], - ] = Field(..., description="""Images presented to subject, either grayscale or RGB""") + ] = Field( + ..., description="""Images presented to subject, either grayscale or RGB""" + ) orientation: Optional[str] = Field( None, description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""", @@ -280,7 +321,9 @@ class OpticalSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -318,7 +361,9 @@ class IndexSeries(TimeSeries): ..., description="""Index of the image (using zero-indexing) in the linked Images object.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_language.py index 4963581..f3b68d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_language.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_language.py @@ -1,20 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if 
sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel): tree_root: bool = False -class Arraylike(ConfiguredBaseModel): - """ - Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. 
- """ - - linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - - # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Arraylike.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py index d7563f0..0e20c78 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_misc.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -27,12 +54,16 @@ if TYPE_CHECKING: from ...hdmf_common.v1_5_0.hdmf_common_table import ( - DynamicTable, DynamicTableRegion, - VectorData, VectorIndex, + VectorData, + DynamicTable, ) -from .core_nwb_base import TimeSeries + +from .core_nwb_ecephys import ElectrodeGroup + +from .core_nwb_base import TimeSeriesStartingTime, TimeSeries, TimeSeriesSync + metamodel_version = "None" version = "2.6.0-alpha" @@ -52,7 +83,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | 
int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -86,7 +117,9 @@ class AbstractFeatureSeries(TimeSeries): ..., description="""Description of the features represented in TimeSeries::data.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -142,7 +175,9 @@ class AnnotationSeries(TimeSeries): data: NDArray[Shape["* num_times"], str] = Field( ..., description="""Annotations made during an experiment.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -179,7 +214,9 @@ class IntervalSeries(TimeSeries): data: NDArray[Shape["* num_times"], int] = Field( ..., description="""Use values >0 if interval started, <0 if interval ended.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -214,7 +251,9 @@ class DecompositionSeries(TimeSeries): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) data: str = Field(..., description="""Data decomposed into frequency bands.""") - metric: str = Field(..., description="""The metric used, e.g. phase, amplitude, power.""") + metric: str = Field( + ..., description="""The metric used, e.g. phase, amplitude, power.""" + ) source_channels: Optional[str] = Field( None, description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""", @@ -223,7 +262,9 @@ class DecompositionSeries(TimeSeries): ..., description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -261,7 +302,9 @@ class DecompositionSeriesData(ConfiguredBaseModel): None, description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion'.""", ) - array: Optional[NDArray[Shape["* num_times, * num_channels, * num_bands"], float]] = Field(None) + array: Optional[ + NDArray[Shape["* num_times, * num_channels, * num_bands"], float] + ] = Field(None) class DecompositionSeriesSourceChannels(DynamicTableRegion): @@ -331,7 +374,7 @@ class Units(DynamicTable): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("Units") spike_times_index: Optional[str] = Field( None, description="""Index into the spike_times dataset.""" ) @@ -341,10 +384,12 @@ class Units(DynamicTable): obs_intervals_index: Optional[str] = Field( None, description="""Index into the obs_intervals dataset.""" ) - obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = Field( - None, description="""Observation intervals for each unit.""" + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = ( + Field(None, description="""Observation intervals for each unit.""") + ) + electrodes_index: Optional[str] = Field( + None, description="""Index into electrodes.""" ) - electrodes_index: Optional[str] = Field(None, description="""Index into electrodes.""") electrodes: Optional[str] = Field( None, description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", @@ -364,10 +409,14 @@ class Units(DynamicTable): NDArray[Shape["* num_units, * num_samples"], float], NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], ] - ] = Field(None, description="""Spike waveform standard deviation for each spike unit.""") - waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = Field( - None, - description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. 
The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", + ] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) + waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = ( + Field( + None, + description="""Individual waveforms for each spike on each electrode. 
This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. 
The number of samples for each waveform must be the same.""", + ) ) waveforms_index: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py index 24b1774..afa9c99 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ogen.py @@ -1,25 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -27,8 +56,11 @@ if TYPE_CHECKING: from .core_nwb_base import ( NWBContainer, TimeSeries, + TimeSeriesStartingTime, + TimeSeriesSync, ) + metamodel_version = "None" version = "2.6.0-alpha" @@ -47,7 +79,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -76,7 +108,9 @@ class OptogeneticSeries(TimeSeries): data: NDArray[Shape["* 
num_times"], float] = Field( ..., description="""Applied power for optogenetic stimulus, in watts.""" ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -111,7 +145,9 @@ class OptogeneticStimulusSite(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) description: str = Field(..., description="""Description of stimulation site.""") - excitation_lambda: float = Field(..., description="""Excitation wavelength, in nm.""") + excitation_lambda: float = Field( + ..., description="""Excitation wavelength, in nm.""" + ) location: str = Field( ..., description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas names for anatomical regions when possible.""", diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py index 40a8a09..25ce31f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_ophys.py @@ -1,22 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,17 +53,23 @@ if TYPE_CHECKING: import numpy as np +from .core_nwb_image import ImageSeriesExternalFile, ImageSeries + from ...hdmf_common.v1_5_0.hdmf_common_table import ( - DynamicTable, DynamicTableRegion, + VectorData, VectorIndex, + DynamicTable, ) + from .core_nwb_base import ( + TimeSeriesStartingTime, NWBContainer, - NWBDataInterface, TimeSeries, + NWBDataInterface, + TimeSeriesSync, ) -from .core_nwb_image import ImageSeries + metamodel_version = "None" version = "2.6.0-alpha" @@ -56,7 +89,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def 
__getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -95,7 +128,9 @@ class OnePhotonSeries(ImageSeries): None, description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.""", ) - power: Optional[float] = Field(None, description="""Power of the excitation in mW, if known.""") + power: Optional[float] = Field( + None, description="""Power of the excitation in mW, if known.""" + ) intensity: Optional[float] = Field( None, description="""Intensity of the excitation in mW/mm^2, if known.""" ) @@ -117,7 +152,9 @@ class OnePhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -183,7 +220,9 @@ class TwoPhotonSeries(ImageSeries): None, description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -225,7 +264,9 @@ class RoiResponseSeries(TimeSeries): ..., description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", ) - description: Optional[str] = Field(None, description="""Description of the time series.""") + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) comments: Optional[str] = Field( None, description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", @@ -282,7 +323,9 @@ class DfOverF(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) @@ -292,7 +335,9 @@ class Fluorescence(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field(default_factory=dict) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) name: str = Field(...) 
@@ -302,7 +347,9 @@ class ImageSegmentation(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field(default_factory=dict) + children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field( + default_factory=dict + ) name: str = Field(...) @@ -322,14 +369,18 @@ class PlaneSegmentation(DynamicTable): None, description="""ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""", ) - pixel_mask_index: Optional[str] = Field(None, description="""Index into pixel_mask.""") - pixel_mask: Optional[List[Any] | Any] = Field( - default_factory=list, + pixel_mask_index: Optional[str] = Field( + None, description="""Index into pixel_mask.""" + ) + pixel_mask: Optional[str] = Field( + None, description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) - voxel_mask_index: Optional[str] = Field(None, description="""Index into voxel_mask.""") - voxel_mask: Optional[List[Any] | Any] = Field( - default_factory=list, + voxel_mask_index: Optional[str] = Field( + None, description="""Index into voxel_mask.""" + ) + voxel_mask: Optional[str] = Field( + None, description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", ) reference_images: Optional[List[ImageSeries] | ImageSeries] = Field( @@ -377,6 +428,29 @@ class PlaneSegmentationPixelMaskIndex(VectorIndex): ] = Field(None) +class PlaneSegmentationPixelMask(VectorData): + """ + Pixel masks for each ROI: a list of indices and weights for the ROI. 
Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["pixel_mask"] = Field("pixel_mask") + x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") + y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the pixel.""") + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class PlaneSegmentationVoxelMaskIndex(VectorIndex): """ Index into voxel_mask. @@ -401,13 +475,39 @@ class PlaneSegmentationVoxelMaskIndex(VectorIndex): ] = Field(None) +class PlaneSegmentationVoxelMask(VectorData): + """ + Voxel masks for each ROI: a list of indices and weights for the ROI. 
Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["voxel_mask"] = Field("voxel_mask") + x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") + y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") + z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the voxel.""") + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + class ImagingPlane(NWBContainer): """ An imaging plane and its metadata. """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[OpticalChannel] | OpticalChannel] = Field(default_factory=dict) + children: Optional[List[OpticalChannel] | OpticalChannel] = Field( + default_factory=dict + ) name: str = Field(...) @@ -418,8 +518,12 @@ class OpticalChannel(NWBContainer): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) 
- description: str = Field(..., description="""Description or other notes about the channel.""") - emission_lambda: float = Field(..., description="""Emission wavelength for channel, in nm.""") + description: str = Field( + ..., description="""Description or other notes about the channel.""" + ) + emission_lambda: float = Field( + ..., description="""Emission wavelength for channel, in nm.""" + ) class MotionCorrection(NWBDataInterface): @@ -462,7 +566,9 @@ Fluorescence.model_rebuild() ImageSegmentation.model_rebuild() PlaneSegmentation.model_rebuild() PlaneSegmentationPixelMaskIndex.model_rebuild() +PlaneSegmentationPixelMask.model_rebuild() PlaneSegmentationVoxelMaskIndex.model_rebuild() +PlaneSegmentationVoxelMask.model_rebuild() ImagingPlane.model_rebuild() OpticalChannel.model_rebuild() MotionCorrection.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py index 9a391ca..6ba2d7e 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/core_nwb_retinotopy.py @@ -1,20 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray 
+import sys if sys.version_info >= (3, 8): from typing import Literal @@ -26,6 +55,7 @@ if TYPE_CHECKING: from .core_nwb_base import NWBDataInterface + metamodel_version = "None" version = "2.6.0-alpha" @@ -44,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -69,7 +99,7 @@ class ImagingRetinotopy(NWBDataInterface): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("ImagingRetinotopy") axis_1_phase_map: str = Field( ..., description="""Phase response to stimulus on the first measured axis.""" ) @@ -113,7 +143,9 @@ class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -131,7 +163,9 @@ class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -149,7 +183,9 @@ class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -167,7 +203,9 @@ class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) unit: Optional[str] = Field( None, description="""Unit that axis data is stored in (e.g., degrees).""" ) @@ -189,8 +227,12 @@ class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") - focal_depth: Optional[float] = Field(None, description="""Focal depth offset, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) + focal_depth: Optional[float] = Field( + None, description="""Focal depth offset, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. Right now only 'raw' is supported.""" ) @@ -208,7 +250,9 @@ class ImagingRetinotopySignMap(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) @@ -227,7 +271,9 @@ class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): None, description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", ) - field_of_view: Optional[float] = Field(None, description="""Size of viewing area, in meters.""") + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) format: Optional[str] = Field( None, description="""Format of image. 
Right now only 'raw' is supported.""" ) diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py index e9ec8cc..88c4dae 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_6_0_alpha/namespace.py @@ -1,23 +1,254 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np +from ...hdmf_experimental.v0_1_0.hdmf_experimental_resources import ( + ExternalResources, + ExternalResourcesKeys, + ExternalResourcesEntities, + ExternalResourcesResources, + ExternalResourcesObjects, + ExternalResourcesObjectKeys, +) + +from ...hdmf_common.v1_5_0.hdmf_common_sparse import CSRMatrix + +from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container, SimpleMultiContainer + +from ...hdmf_common.v1_5_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + +from ...hdmf_experimental.v0_1_0.hdmf_experimental_experimental import EnumData + +from 
.core_nwb_retinotopy import ( + ImagingRetinotopy, + ImagingRetinotopyAxis1PhaseMap, + ImagingRetinotopyAxis1PowerMap, + ImagingRetinotopyAxis2PhaseMap, + ImagingRetinotopyAxis2PowerMap, + ImagingRetinotopyFocalDepthImage, + ImagingRetinotopySignMap, + ImagingRetinotopyVasculatureImage, +) + +from .core_nwb_base import ( + NWBData, + TimeSeriesReferenceVectorData, + Image, + ImageReferences, + NWBContainer, + NWBDataInterface, + TimeSeries, + TimeSeriesData, + TimeSeriesStartingTime, + TimeSeriesSync, + ProcessingModule, + Images, + ImagesOrderOfImages, +) + +from .core_nwb_ophys import ( + OnePhotonSeries, + TwoPhotonSeries, + RoiResponseSeries, + RoiResponseSeriesRois, + DfOverF, + Fluorescence, + ImageSegmentation, + PlaneSegmentation, + PlaneSegmentationPixelMaskIndex, + PlaneSegmentationPixelMask, + PlaneSegmentationVoxelMaskIndex, + PlaneSegmentationVoxelMask, + ImagingPlane, + OpticalChannel, + MotionCorrection, + CorrectedImageStack, +) + +from .core_nwb_device import Device + +from .core_nwb_image import ( + GrayscaleImage, + RGBImage, + RGBAImage, + ImageSeries, + ImageSeriesExternalFile, + ImageMaskSeries, + OpticalSeries, + IndexSeries, +) + +from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite + +from .core_nwb_icephys import ( + PatchClampSeries, + PatchClampSeriesData, + CurrentClampSeries, + CurrentClampSeriesData, + IZeroClampSeries, + CurrentClampStimulusSeries, + CurrentClampStimulusSeriesData, + VoltageClampSeries, + VoltageClampSeriesData, + VoltageClampSeriesCapacitanceFast, + VoltageClampSeriesCapacitanceSlow, + VoltageClampSeriesResistanceCompBandwidth, + VoltageClampSeriesResistanceCompCorrection, + VoltageClampSeriesResistanceCompPrediction, + VoltageClampSeriesWholeCellCapacitanceComp, + VoltageClampSeriesWholeCellSeriesResistanceComp, + VoltageClampStimulusSeries, + VoltageClampStimulusSeriesData, + IntracellularElectrode, + SweepTable, + SweepTableSeriesIndex, + IntracellularElectrodesTable, + IntracellularStimuliTable, 
+ IntracellularStimuliTableStimulus, + IntracellularResponsesTable, + IntracellularResponsesTableResponse, + IntracellularRecordingsTable, + SimultaneousRecordingsTable, + SimultaneousRecordingsTableRecordings, + SimultaneousRecordingsTableRecordingsIndex, + SequentialRecordingsTable, + SequentialRecordingsTableSimultaneousRecordings, + SequentialRecordingsTableSimultaneousRecordingsIndex, + RepetitionsTable, + RepetitionsTableSequentialRecordings, + RepetitionsTableSequentialRecordingsIndex, + ExperimentalConditionsTable, + ExperimentalConditionsTableRepetitions, + ExperimentalConditionsTableRepetitionsIndex, +) + +from .core_nwb_ecephys import ( + ElectricalSeries, + ElectricalSeriesElectrodes, + SpikeEventSeries, + FeatureExtraction, + FeatureExtractionElectrodes, + EventDetection, + EventWaveform, + FilteredEphys, + LFP, + ElectrodeGroup, + ElectrodeGroupPosition, + ClusterWaveforms, + Clustering, +) + +from .core_nwb_behavior import ( + SpatialSeries, + SpatialSeriesData, + BehavioralEpochs, + BehavioralEvents, + BehavioralTimeSeries, + PupilTracking, + EyeTracking, + CompassDirection, + Position, +) + +from .core_nwb_misc import ( + AbstractFeatureSeries, + AbstractFeatureSeriesData, + AnnotationSeries, + IntervalSeries, + DecompositionSeries, + DecompositionSeriesData, + DecompositionSeriesSourceChannels, + DecompositionSeriesBands, + Units, + UnitsSpikeTimesIndex, + UnitsSpikeTimes, + UnitsObsIntervalsIndex, + UnitsElectrodesIndex, + UnitsElectrodes, + UnitsWaveformsIndex, + UnitsWaveformsIndexIndex, +) + +from .core_nwb_file import ( + ScratchData, + NWBFile, + NWBFileStimulus, + NWBFileGeneral, + NWBFileGeneralSourceScript, + NWBFileGeneralExtracellularEphys, + NWBFileGeneralExtracellularEphysElectrodes, + NWBFileGeneralIntracellularEphys, + LabMetaData, + Subject, + SubjectAge, +) + +from .core_nwb_epoch import ( + TimeIntervals, + TimeIntervalsTagsIndex, + TimeIntervalsTimeseries, + TimeIntervalsTimeseriesIndex, +) + + metamodel_version = "None" version 
= "2.6.0-alpha" @@ -36,7 +267,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/__init__.py new file mode 100644 index 0000000..0519ecb --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py new file mode 100644 index 0000000..46d1599 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_base.py @@ -0,0 +1,351 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from ...hdmf_common.v1_8_0.hdmf_common_table import VectorData, DynamicTable + +from ...hdmf_common.v1_8_0.hdmf_common_base import Container, Data + + +metamodel_version = "None" +version = "2.7.0" + + +class ConfiguredBaseModel(BaseModel): + 
model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class NWBData(Data): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + + +class TimeSeriesReferenceVectorData(VectorData): + """ + Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field("timeseries") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. 
The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class Image(NWBData): + """ + An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)). + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + resolution: Optional[float] = Field( + None, description="""Pixel resolution of the image, in pixels per centimeter.""" + ) + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r, g, b"], float], + NDArray[Shape["* x, * y, 4 r, g, b, a"], float], + ] + ] = Field(None) + + +class ImageReferences(NWBData): + """ + Ordered dataset of references to Image objects. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + image: List[str] | str = Field( + default_factory=list, + description="""Ordered dataset of references to Image objects.""", + ) + + +class NWBContainer(Container): + """ + An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + + +class NWBDataInterface(NWBContainer): + """ + An abstract data type for a generic container storing collections of data, as opposed to metadata. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + + +class TimeSeries(NWBDataInterface): + """ + General purpose time series. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + data: str = Field( + ..., + description="""Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class TimeSeriesData(ConfiguredBaseModel): + """ + Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["data"] = Field("data") + conversion: Optional[float] = Field( + None, + description="""Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.""", + ) + offset: Optional[float] = Field( + None, + description="""Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.""", + ) + resolution: Optional[float] = Field( + None, + description="""Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.""", + ) + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + continuity: Optional[str] = Field( + None, + description="""Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.""", + ) + array: Optional[ + Union[ + NDArray[Shape["* num_times"], Any], + NDArray[Shape["* num_times, * num_DIM2"], Any], + NDArray[Shape["* num_times, * num_DIM2, * num_DIM3"], Any], + NDArray[Shape["* num_times, * num_DIM2, * num_DIM3, * num_DIM4"], Any], + ] + ] = Field(None) + + +class TimeSeriesStartingTime(ConfiguredBaseModel): + """ + Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["starting_time"] = Field("starting_time") + rate: Optional[float] = Field(None, description="""Sampling rate, in Hz.""") + unit: Optional[str] = Field( + None, + description="""Unit of measurement for time, which is fixed to 'seconds'.""", + ) + value: float = Field(...) + + +class TimeSeriesSync(ConfiguredBaseModel): + """ + Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["sync"] = Field("sync") + + +class ProcessingModule(NWBContainer): + """ + A collection of processed data. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[ + List[Union[BaseModel, DynamicTable, NWBDataInterface]] + | Union[BaseModel, DynamicTable, NWBDataInterface] + ] = Field(default_factory=dict) + name: str = Field(...) + + +class Images(NWBDataInterface): + """ + A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field("Images") + description: Optional[str] = Field( + None, description="""Description of this collection of images.""" + ) + image: List[str] | str = Field( + default_factory=list, description="""Images stored in this collection.""" + ) + order_of_images: Optional[str] = Field( + None, + description="""Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.""", + ) + + +class ImagesOrderOfImages(ImageReferences): + """ + Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["order_of_images"] = Field("order_of_images") + image: List[str] | str = Field( + default_factory=list, + description="""Ordered dataset of references to Image objects.""", + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +NWBData.model_rebuild() +TimeSeriesReferenceVectorData.model_rebuild() +Image.model_rebuild() +ImageReferences.model_rebuild() +NWBContainer.model_rebuild() +NWBDataInterface.model_rebuild() +TimeSeries.model_rebuild() +TimeSeriesData.model_rebuild() +TimeSeriesStartingTime.model_rebuild() +TimeSeriesSync.model_rebuild() +ProcessingModule.model_rebuild() +Images.model_rebuild() +ImagesOrderOfImages.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py new file mode 100644 index 0000000..f6c95b3 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_behavior.py @@ -0,0 +1,256 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from .core_nwb_base import ( + NWBDataInterface, + TimeSeriesStartingTime, + 
TimeSeriesSync, + TimeSeries, +) + +from .core_nwb_misc import IntervalSeries + + +metamodel_version = "None" +version = "2.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class SpatialSeries(TimeSeries): + """ + Direction, e.g., of gaze or travel, or position. The TimeSeries::data field is a 2D array storing position or direction relative to some reference frame. Array structure: [num measurements] [num dimensions]. Each SpatialSeries has a text dataset reference_frame that indicates the zero-position, or the zero-axes for direction. For example, if representing gaze direction, 'straight-ahead' might be a specific pixel on the monitor, or some other point in space. For position data, the 0,0 point might be the top-left corner of an enclosure, as viewed from the tracking camera. The unit of data will indicate how to interpret SpatialSeries values. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) 
+ data: str = Field( + ..., + description="""1-D or 2-D array storing position or direction relative to some reference frame.""", + ) + reference_frame: Optional[str] = Field( + None, + description="""Description defining what exactly 'straight-ahead' means.""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. 
The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class SpatialSeriesData(ConfiguredBaseModel): + """ + 1-D or 2-D array storing position or direction relative to some reference frame. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["data"] = Field("data") + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + array: Optional[ + Union[ + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, 1 x"], float], + NDArray[Shape["* num_times, 2 x,y"], float], + NDArray[Shape["* num_times, 3 x,y,z"], float], + ] + ] = Field(None) + + +class BehavioralEpochs(NWBDataInterface): + """ + TimeSeries for storing behavioral epochs. The objective of this and the other two Behavioral interfaces (e.g. BehavioralEvents and BehavioralTimeSeries) is to provide generic hooks for software tools/scripts. This allows a tool/script to take the output one specific interface (e.g., UnitTimes) and plot that data relative to another data modality (e.g., behavioral events) without having to define all possible modalities in advance. Declaring one of these interfaces means that one or more TimeSeries of the specified type is published. These TimeSeries should reside in a group having the same name as the interface. For example, if a BehavioralTimeSeries interface is declared, the module will have one or more TimeSeries defined in the module sub-group 'BehavioralTimeSeries'. BehavioralEpochs should use IntervalSeries. BehavioralEvents is used for irregular events. 
BehavioralTimeSeries is for continuous data. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[IntervalSeries] | IntervalSeries] = Field( + default_factory=dict + ) + name: str = Field(...) + + +class BehavioralEvents(NWBDataInterface): + """ + TimeSeries for storing behavioral events. See description of BehavioralEpochs for more details. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[TimeSeries] | TimeSeries] = Field(default_factory=dict) + name: str = Field(...) + + +class BehavioralTimeSeries(NWBDataInterface): + """ + TimeSeries for storing Behavoioral time series data. See description of BehavioralEpochs for more details. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[TimeSeries] | TimeSeries] = Field(default_factory=dict) + name: str = Field(...) + + +class PupilTracking(NWBDataInterface): + """ + Eye-tracking data, representing pupil size. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[TimeSeries] | TimeSeries] = Field(default_factory=dict) + name: str = Field(...) + + +class EyeTracking(NWBDataInterface): + """ + Eye-tracking data, representing direction of gaze. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) + name: str = Field(...) + + +class CompassDirection(NWBDataInterface): + """ + With a CompassDirection interface, a module publishes a SpatialSeries object representing a floating point value for theta. The SpatialSeries::reference_frame field should indicate what direction corresponds to 0 and which is the direction of rotation (this should be clockwise). The si_unit for the SpatialSeries should be radians or degrees. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) + name: str = Field(...) + + +class Position(NWBDataInterface): + """ + Position data, whether along the x, x/y or x/y/z axis. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[SpatialSeries] | SpatialSeries] = Field( + default_factory=dict + ) + name: str = Field(...) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +SpatialSeries.model_rebuild() +SpatialSeriesData.model_rebuild() +BehavioralEpochs.model_rebuild() +BehavioralEvents.model_rebuild() +BehavioralTimeSeries.model_rebuild() +PupilTracking.model_rebuild() +EyeTracking.model_rebuild() +CompassDirection.model_rebuild() +Position.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_device.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_device.py new file mode 100644 index 0000000..fa8f8e4 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_device.py @@ -0,0 +1,114 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if 
TYPE_CHECKING: + import numpy as np + + +from .core_nwb_base import NWBContainer + + +metamodel_version = "None" +version = "2.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class Device(NWBContainer): + """ + Metadata about a data acquisition device, e.g., recording system, electrode, microscope. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + description: Optional[str] = Field( + None, + description="""Description of the device (e.g., model, firmware version, processing software version, etc.) 
as free-form text.""", + ) + manufacturer: Optional[str] = Field( + None, description="""The name of the manufacturer of the device.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Device.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py new file mode 100644 index 0000000..caf7b3f --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ecephys.py @@ -0,0 +1,429 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from ...hdmf_common.v1_8_0.hdmf_common_table import DynamicTable, DynamicTableRegion + +from .core_nwb_base import ( + TimeSeriesStartingTime, + NWBDataInterface, + TimeSeriesSync, + TimeSeries, + NWBContainer, +) + + +metamodel_version = "None" +version = "2.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = 
Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class ElectricalSeries(TimeSeries): + """ + A time series of acquired voltage data from extracellular recordings. The data field is an int or float array storing data in volts. The first dimension should always represent time. The second dimension, if present, should represent channels. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + filtering: Optional[str] = Field( + None, + description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", + ) + data: Union[ + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_channels"], float], + NDArray[Shape["* num_times, * num_channels, * num_samples"], float], + ] = Field(..., description="""Recorded voltage data.""") + electrodes: str = Field( + ..., + description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", + ) + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( + None, + description="""Channel-specific conversion factor. 
Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. 
Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class ElectricalSeriesElectrodes(DynamicTableRegion): + """ + DynamicTableRegion pointer to the electrodes that this time series was generated from. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["electrodes"] = Field("electrodes") + table: Optional[str] = Field( + None, + description="""Reference to the DynamicTable object that this region applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what this table region points to.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class SpikeEventSeries(ElectricalSeries): + """ + Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. 
TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode). + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + data: Union[ + NDArray[Shape["* num_events, * num_channels, * num_samples"], float], + NDArray[Shape["* num_events, * num_samples"], float], + ] = Field(..., description="""Spike waveforms.""") + timestamps: NDArray[Shape["* num_times"], float] = Field( + ..., + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.""", + ) + filtering: Optional[str] = Field( + None, + description="""Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.""", + ) + electrodes: str = Field( + ..., + description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", + ) + channel_conversion: Optional[NDArray[Shape["* num_channels"], float]] = Field( + None, + description="""Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. 
This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. 
Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class FeatureExtraction(NWBDataInterface): + """ + Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field("FeatureExtraction") + description: NDArray[Shape["* num_features"], str] = Field( + ..., + description="""Description of features (eg, ''PC1'') for each of the extracted features.""", + ) + features: NDArray[Shape["* num_events, * num_channels, * num_features"], float] = ( + Field( + ..., + description="""Multi-dimensional array of features extracted from each event.""", + ) + ) + times: NDArray[Shape["* num_events"], float] = Field( + ..., + description="""Times of events that features correspond to (can be a link).""", + ) + electrodes: str = Field( + ..., + description="""DynamicTableRegion pointer to the electrodes that this time series was generated from.""", + ) + + +class FeatureExtractionElectrodes(DynamicTableRegion): + """ + DynamicTableRegion pointer to the electrodes that this time series was generated from. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["electrodes"] = Field("electrodes") + table: Optional[str] = Field( + None, + description="""Reference to the DynamicTable object that this region applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what this table region points to.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class EventDetection(NWBDataInterface): + """ + Detected spike events from voltage trace(s). 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field("EventDetection") + detection_method: str = Field( + ..., + description="""Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.""", + ) + source_idx: NDArray[Shape["* num_events"], int] = Field( + ..., + description="""Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.""", + ) + times: NDArray[Shape["* num_events"], float] = Field( + ..., description="""Timestamps of events, in seconds.""" + ) + + +class EventWaveform(NWBDataInterface): + """ + Represents either the waveforms of detected events, as extracted from a raw data trace in /acquisition, or the event waveforms that were stored during experiment acquisition. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[SpikeEventSeries] | SpikeEventSeries] = Field( + default_factory=dict + ) + name: str = Field(...) + + +class FilteredEphys(NWBDataInterface): + """ + Electrophysiology data from one or more channels that has been subjected to filtering. Examples of filtered data include Theta and Gamma (LFP has its own interface). FilteredEphys modules publish an ElectricalSeries for each filtered channel or set of channels. The name of each ElectricalSeries is arbitrary but should be informative. The source of the filtered data, whether this is from analysis of another time series or as acquired by hardware, should be noted in each's TimeSeries::description field. 
There is no assumed 1::1 correspondence between filtered ephys signals and electrodes, as a single signal can apply to many nearby electrodes, and one electrode may have different filtered (e.g., theta and/or gamma) signals represented. Filter properties should be noted in the ElectricalSeries 'filtering' attribute. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) + name: str = Field(...) + + +class LFP(NWBDataInterface): + """ + LFP data from one or more channels. The electrode map in each published ElectricalSeries will identify which channels are providing LFP data. Filter properties should be noted in the ElectricalSeries 'filtering' attribute. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[ElectricalSeries] | ElectricalSeries] = Field( + default_factory=dict + ) + name: str = Field(...) + + +class ElectrodeGroup(NWBContainer): + """ + A physical grouping of electrodes, e.g. a shank of an array. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + description: Optional[str] = Field( + None, description="""Description of this electrode group.""" + ) + location: Optional[str] = Field( + None, + description="""Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. 
Use standard atlas names for anatomical regions when possible.""", + ) + position: Optional[str] = Field( + None, description="""stereotaxic or common framework coordinates""" + ) + + +class ElectrodeGroupPosition(ConfiguredBaseModel): + """ + stereotaxic or common framework coordinates + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["position"] = Field("position") + x: Optional[float] = Field(None, description="""x coordinate""") + y: Optional[float] = Field(None, description="""y coordinate""") + z: Optional[float] = Field(None, description="""z coordinate""") + + +class ClusterWaveforms(NWBDataInterface): + """ + DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field("ClusterWaveforms") + waveform_filtering: str = Field( + ..., description="""Filtering applied to data before generating mean/sd""" + ) + waveform_mean: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( + ..., + description="""The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). 
Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)""", + ) + waveform_sd: NDArray[Shape["* num_clusters, * num_samples"], float] = Field( + ..., + description="""Stdev of waveforms for each cluster, using the same indices as in mean""", + ) + + +class Clustering(NWBDataInterface): + """ + DEPRECATED Clustered spike data, whether from automatic clustering tools (e.g., klustakwik) or as a result of manual sorting. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field("Clustering") + description: str = Field( + ..., + description="""Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)""", + ) + num: NDArray[Shape["* num_events"], int] = Field( + ..., description="""Cluster number of each event""" + ) + peak_over_rms: NDArray[Shape["* num_clusters"], float] = Field( + ..., + description="""Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).""", + ) + times: NDArray[Shape["* num_events"], float] = Field( + ..., + description="""Times of clustered events, in seconds. 
This may be a link to times field in associated FeatureExtraction module.""", + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +ElectricalSeries.model_rebuild() +ElectricalSeriesElectrodes.model_rebuild() +SpikeEventSeries.model_rebuild() +FeatureExtraction.model_rebuild() +FeatureExtractionElectrodes.model_rebuild() +EventDetection.model_rebuild() +EventWaveform.model_rebuild() +FilteredEphys.model_rebuild() +LFP.model_rebuild() +ElectrodeGroup.model_rebuild() +ElectrodeGroupPosition.model_rebuild() +ClusterWaveforms.model_rebuild() +Clustering.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py new file mode 100644 index 0000000..540c115 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_epoch.py @@ -0,0 +1,227 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from ...hdmf_common.v1_8_0.hdmf_common_table import ( + VectorData, + DynamicTable, + VectorIndex, +) + +from .core_nwb_base import TimeSeriesReferenceVectorData, TimeSeries + + +metamodel_version = "None" +version = "2.7.0" + + +class ConfiguredBaseModel(BaseModel): 
+ model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class TimeIntervals(DynamicTable): + """ + A container for aggregating epoch data and the TimeSeries that each epoch applies to. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + start_time: Optional[List[float] | float] = Field( + default_factory=list, description="""Start time of epoch, in seconds.""" + ) + stop_time: Optional[List[float] | float] = Field( + default_factory=list, description="""Stop time of epoch, in seconds.""" + ) + tags: Optional[List[str] | str] = Field( + default_factory=list, + description="""User-defined tags that identify or categorize events.""", + ) + tags_index: Optional[str] = Field(None, description="""Index for tags.""") + timeseries: Optional[str] = Field( + None, description="""An index into a TimeSeries object.""" + ) + timeseries_index: Optional[str] = Field( + None, description="""Index for timeseries.""" + ) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class TimeIntervalsTagsIndex(VectorIndex): + """ + Index for tags. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["tags_index"] = Field("tags_index") + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class TimeIntervalsTimeseries(TimeSeriesReferenceVectorData): + """ + An index into a TimeSeries object. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["timeseries"] = Field("timeseries") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. 
The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class TimeIntervalsTimeseriesIndex(VectorIndex): + """ + Index for timeseries. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["timeseries_index"] = Field("timeseries_index") + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +TimeIntervals.model_rebuild() +TimeIntervalsTagsIndex.model_rebuild() +TimeIntervalsTimeseries.model_rebuild() +TimeIntervalsTimeseriesIndex.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py new file mode 100644 index 0000000..103ebf0 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_file.py @@ -0,0 +1,526 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing 
import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from .core_nwb_misc import Units + +from .core_nwb_icephys import ( + RepetitionsTable, + ExperimentalConditionsTable, + SimultaneousRecordingsTable, + IntracellularRecordingsTable, + SweepTable, + SequentialRecordingsTable, + IntracellularElectrode, +) + +from .core_nwb_device import Device + +from .core_nwb_ecephys import ElectrodeGroup + +from .core_nwb_epoch import TimeIntervals + +from .core_nwb_base import ( + NWBContainer, + NWBData, + TimeSeries, + Images, + ProcessingModule, + NWBDataInterface, +) + +from .core_nwb_ophys import ImagingPlane + +from ...hdmf_common.v1_8_0.hdmf_common_table import VectorData, DynamicTable + +from .core_nwb_ogen import OptogeneticStimulusSite + + +metamodel_version = "None" +version = "2.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def 
__setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class ScratchData(NWBData): + """ + Any one-off datasets + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + notes: Optional[str] = Field( + None, description="""Any notes the user has about the dataset being stored""" + ) + + +class NWBFile(NWBContainer): + """ + An NWB file storing cellular-based neurophysiology data from a single experimental session. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: Literal["root"] = Field("root") + nwb_version: Optional[str] = Field( + None, + description="""File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.""", + ) + file_create_date: NDArray[Shape["* num_modifications"], datetime] = Field( + ..., + description="""A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.""", + ) + identifier: str = Field( + ..., + description="""A unique text identifier for the file. For example, concatenated lab name, file creation date/time and experimentalist, or a hash of these and/or other values. 
The goal is that the string should be unique to all other files.""", + ) + session_description: str = Field( + ..., + description="""A description of the experimental session and data in the file.""", + ) + session_start_time: datetime = Field( + ..., + description="""Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds.""", + ) + timestamps_reference_time: datetime = Field( + ..., + description="""Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).""", + ) + acquisition: Optional[ + List[Union[BaseModel, DynamicTable, NWBDataInterface]] + | Union[BaseModel, DynamicTable, NWBDataInterface] + ] = Field( + default_factory=dict, + description="""Data streams recorded from the system, including ephys, ophys, tracking, etc. This group should be read-only after the experiment is completed and timestamps are corrected to a common timebase. The data stored here may be links to raw data stored in external NWB files. This will allow keeping bulky raw data out of the file while preserving the option of keeping some/all in the file. Acquired data includes tracking and experimental data streams (i.e., everything measured from the system). 
If bulky data is stored in the /acquisition group, the data can exist in a separate NWB file that is linked to by the file being used for processing and analysis.""", + ) + analysis: Optional[ + List[Union[BaseModel, DynamicTable, NWBContainer]] + | Union[BaseModel, DynamicTable, NWBContainer] + ] = Field( + default_factory=dict, + description="""Lab-specific and custom scientific analysis of data. There is no defined format for the content of this group - the format is up to the individual user/lab. To facilitate sharing analysis data between labs, the contents here should be stored in standard types (e.g., neurodata_types) and appropriately documented. The file can store lab-specific and custom data analysis without restriction on its form or schema, reducing data formatting restrictions on end users. Such data should be placed in the analysis group. The analysis data should be documented so that it could be shared with other labs.""", + ) + scratch: Optional[ + List[Union[BaseModel, DynamicTable, NWBContainer]] + | Union[BaseModel, DynamicTable, NWBContainer] + ] = Field( + default_factory=dict, + description="""A place to store one-off analysis results. Data placed here is not intended for sharing. By placing data here, users acknowledge that there is no guarantee that their data meets any standard.""", + ) + processing: Optional[List[ProcessingModule] | ProcessingModule] = Field( + default_factory=dict, + description="""The home for ProcessingModules. These modules perform intermediate analysis of data that is necessary to perform before scientific analysis. Examples include spike clustering, extracting position from tracking data, stitching together image slices. ProcessingModules can be large and express many data sets from relatively complex analysis (e.g., spike detection and clustering) or small, representing extraction of position information from tracking video, or even binary lick/no-lick decisions. 
Common software tools (e.g., klustakwik, MClust) are expected to read/write data here. 'Processing' refers to intermediate analysis of the acquired data to make it more amenable to scientific analysis.""", + ) + stimulus: str = Field( + ..., + description="""Data pushed into the system (eg, video stimulus, sound, voltage, etc) and secondary representations of that data (eg, measurements of something used as a stimulus). This group should be made read-only after experiment complete and timestamps are corrected to common timebase. Stores both presented stimuli and stimulus templates, the latter in case the same stimulus is presented multiple times, or is pulled from an external stimulus library. Stimuli are here defined as any signal that is pushed into the system as part of the experiment (eg, sound, video, voltage, etc). Many different experiments can use the same stimuli, and stimuli can be re-used during an experiment. The stimulus group is organized so that one version of template stimuli can be stored and these be used multiple times. These templates can exist in the present file or can be linked to a remote library file.""", + ) + general: str = Field( + ..., + description="""Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. 
Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.""", + ) + intervals: Optional[List[TimeIntervals] | TimeIntervals] = Field( + default_factory=dict, + description="""Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.""", + ) + units: Optional[str] = Field(None, description="""Data about sorted spike units.""") + + +class NWBFileStimulus(ConfiguredBaseModel): + """ + Data pushed into the system (eg, video stimulus, sound, voltage, etc) and secondary representations of that data (eg, measurements of something used as a stimulus). This group should be made read-only after experiment complete and timestamps are corrected to common timebase. Stores both presented stimuli and stimulus templates, the latter in case the same stimulus is presented multiple times, or is pulled from an external stimulus library. Stimuli are here defined as any signal that is pushed into the system as part of the experiment (eg, sound, video, voltage, etc). Many different experiments can use the same stimuli, and stimuli can be re-used during an experiment. The stimulus group is organized so that one version of template stimuli can be stored and these be used multiple times. These templates can exist in the present file or can be linked to a remote library file. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["stimulus"] = Field("stimulus") + presentation: Optional[ + List[Union[BaseModel, DynamicTable, NWBDataInterface, TimeSeries]] + | Union[BaseModel, DynamicTable, NWBDataInterface, TimeSeries] + ] = Field( + default_factory=dict, description="""Stimuli presented during the experiment.""" + ) + templates: Optional[List[Union[Images, TimeSeries]] | Union[Images, TimeSeries]] = ( + Field( + default_factory=dict, + description="""Template stimuli. Timestamps in templates are based on stimulus design and are relative to the beginning of the stimulus. When templates are used, the stimulus instances must convert presentation times to the experiment`s time reference frame.""", + ) + ) + + +class NWBFileGeneral(ConfiguredBaseModel): + """ + Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["general"] = Field("general") + data_collection: Optional[str] = Field( + None, description="""Notes about data collection and analysis.""" + ) + experiment_description: Optional[str] = Field( + None, description="""General description of the experiment.""" + ) + experimenter: Optional[NDArray[Shape["* num_experimenters"], str]] = Field( + None, + description="""Name of person(s) who performed the experiment. Can also specify roles of different people involved.""", + ) + institution: Optional[str] = Field( + None, description="""Institution(s) where experiment was performed.""" + ) + keywords: Optional[NDArray[Shape["* num_keywords"], str]] = Field( + None, description="""Terms to search over.""" + ) + lab: Optional[str] = Field( + None, description="""Laboratory where experiment was performed.""" + ) + notes: Optional[str] = Field(None, description="""Notes about the experiment.""") + pharmacology: Optional[str] = Field( + None, + description="""Description of drugs used, including how and when they were administered. Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc.""", + ) + protocol: Optional[str] = Field( + None, + description="""Experimental protocol, if applicable. e.g., include IACUC protocol number.""", + ) + related_publications: Optional[NDArray[Shape["* num_publications"], str]] = Field( + None, description="""Publication information. 
PMID, DOI, URL, etc.""" + ) + session_id: Optional[str] = Field( + None, description="""Lab-specific ID for the session.""" + ) + slices: Optional[str] = Field( + None, + description="""Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.""", + ) + source_script: Optional[str] = Field( + None, + description="""Script file or link to public source code used to create this NWB file.""", + ) + stimulus: Optional[str] = Field( + None, + description="""Notes about stimuli, such as how and where they were presented.""", + ) + surgery: Optional[str] = Field( + None, + description="""Narrative description about surgery/surgeries, including date(s) and who performed surgery.""", + ) + virus: Optional[str] = Field( + None, + description="""Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.""", + ) + lab_meta_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Place-holder than can be extended so that lab-specific meta-data can be placed in /general.""", + ) + devices: Optional[List[Device] | Device] = Field( + default_factory=dict, + description="""Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.""", + ) + subject: Optional[str] = Field( + None, + description="""Information about the animal or person from which the data was measured.""", + ) + extracellular_ephys: Optional[str] = Field( + None, description="""Metadata related to extracellular electrophysiology.""" + ) + intracellular_ephys: Optional[str] = Field( + None, description="""Metadata related to intracellular electrophysiology.""" + ) + optogenetics: Optional[List[OptogeneticStimulusSite] | OptogeneticStimulusSite] = ( + Field( + default_factory=dict, + description="""Metadata describing optogenetic stimuluation.""", + ) + ) + optophysiology: Optional[List[ImagingPlane] | ImagingPlane] = Field( + 
default_factory=dict, description="""Metadata related to optophysiology.""" + ) + + +class NWBFileGeneralSourceScript(ConfiguredBaseModel): + """ + Script file or link to public source code used to create this NWB file. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["source_script"] = Field("source_script") + file_name: Optional[str] = Field(None, description="""Name of script file.""") + value: str = Field(...) + + +class NWBFileGeneralExtracellularEphys(ConfiguredBaseModel): + """ + Metadata related to extracellular electrophysiology. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["extracellular_ephys"] = Field("extracellular_ephys") + electrode_group: Optional[List[str] | str] = Field( + default_factory=list, description="""Physical group of electrodes.""" + ) + electrodes: Optional[str] = Field( + None, + description="""A table of all electrodes (i.e. channels) used for recording.""", + ) + + +class NWBFileGeneralExtracellularEphysElectrodes(DynamicTable): + """ + A table of all electrodes (i.e. channels) used for recording. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["electrodes"] = Field("electrodes") + x: Optional[List[float] | float] = Field( + default_factory=list, + description="""x coordinate of the channel location in the brain (+x is posterior).""", + ) + y: Optional[List[float] | float] = Field( + default_factory=list, + description="""y coordinate of the channel location in the brain (+y is inferior).""", + ) + z: Optional[List[float] | float] = Field( + default_factory=list, + description="""z coordinate of the channel location in the brain (+z is right).""", + ) + imp: Optional[List[float] | float] = Field( + default_factory=list, description="""Impedance of the channel, in ohms.""" + ) + location: Optional[List[str] | str] = Field( + default_factory=list, + description="""Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", + ) + filtering: Optional[List[str] | str] = Field( + default_factory=list, + description="""Description of hardware filtering, including the filter name and frequency cutoffs.""", + ) + group: Optional[List[str] | str] = Field( + default_factory=list, + description="""Reference to the ElectrodeGroup this electrode is a part of.""", + ) + group_name: Optional[List[str] | str] = Field( + default_factory=list, + description="""Name of the ElectrodeGroup this electrode is a part of.""", + ) + rel_x: Optional[List[float] | float] = Field( + default_factory=list, description="""x coordinate in electrode group""" + ) + rel_y: Optional[List[float] | float] = Field( + default_factory=list, description="""y coordinate in electrode group""" + ) + rel_z: Optional[List[float] | float] = Field( + default_factory=list, description="""z coordinate in electrode group""" + ) + reference: Optional[List[str] | str] = Field( + default_factory=list, + 
description="""Description of the reference electrode and/or reference scheme used for this electrode, e.g., \"stainless steel skull screw\" or \"online common average referencing\".""", + ) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class NWBFileGeneralIntracellularEphys(ConfiguredBaseModel): + """ + Metadata related to intracellular electrophysiology. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["intracellular_ephys"] = Field("intracellular_ephys") + filtering: Optional[str] = Field( + None, + description="""[DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.""", + ) + intracellular_electrode: Optional[List[str] | str] = Field( + default_factory=list, description="""An intracellular electrode.""" + ) + sweep_table: Optional[str] = Field( + None, + description="""[DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. 
Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.""", + ) + intracellular_recordings: Optional[str] = Field( + None, + description="""A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response are recorded as as part of an experiment. In this case both, the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used.""", + ) + simultaneous_recordings: Optional[str] = Field( + None, + description="""A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes""", + ) + sequential_recordings: Optional[str] = Field( + None, + description="""A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where the a sequence of stimuli of the same type with varying parameters have been presented in a sequence.""", + ) + repetitions: Optional[str] = Field( + None, + description="""A table for grouping different sequential intracellular recordings together. 
With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence.""", + ) + experimental_conditions: Optional[str] = Field( + None, + description="""A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions.""", + ) + + +class LabMetaData(NWBContainer): + """ + Lab-specific meta-data. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + + +class Subject(NWBContainer): + """ + Information about the animal or person from which the data was measured. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + age: Optional[str] = Field( + None, + description="""Age of subject. Can be supplied instead of 'date_of_birth'.""", + ) + date_of_birth: Optional[datetime] = Field( + None, + description="""Date of birth of subject. Can be supplied instead of 'age'.""", + ) + description: Optional[str] = Field( + None, + description="""Description of subject and where subject came from (e.g., breeder, if animal).""", + ) + genotype: Optional[str] = Field( + None, description="""Genetic strain. If absent, assume Wild Type (WT).""" + ) + sex: Optional[str] = Field(None, description="""Gender of subject.""") + species: Optional[str] = Field(None, description="""Species of subject.""") + strain: Optional[str] = Field(None, description="""Strain of subject.""") + subject_id: Optional[str] = Field( + None, + description="""ID of animal/person used/participating in experiment (lab convention).""", + ) + weight: Optional[str] = Field( + None, + description="""Weight at time of experiment, at time of surgery and at other important times.""", + ) + + +class SubjectAge(ConfiguredBaseModel): + """ + Age of subject. Can be supplied instead of 'date_of_birth'. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["age"] = Field("age") + reference: Optional[str] = Field( + None, + description="""Age is with reference to this event. Can be 'birth' or 'gestational'. If reference is omitted, 'birth' is implied.""", + ) + value: str = Field(...) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +ScratchData.model_rebuild() +NWBFile.model_rebuild() +NWBFileStimulus.model_rebuild() +NWBFileGeneral.model_rebuild() +NWBFileGeneralSourceScript.model_rebuild() +NWBFileGeneralExtracellularEphys.model_rebuild() +NWBFileGeneralExtracellularEphysElectrodes.model_rebuild() +NWBFileGeneralIntracellularEphys.model_rebuild() +LabMetaData.model_rebuild() +Subject.model_rebuild() +SubjectAge.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py new file mode 100644 index 0000000..46d7517 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_icephys.py @@ -0,0 +1,1282 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from ...hdmf_common.v1_8_0.hdmf_common_table 
import (
+    DynamicTableRegion,
+    AlignedDynamicTable,
+    VectorIndex,
+    VectorData,
+    DynamicTable,
+)
+
+from .core_nwb_base import (
+    TimeSeriesStartingTime,
+    TimeSeriesReferenceVectorData,
+    NWBContainer,
+    TimeSeries,
+    TimeSeriesSync,
+)
+
+
+metamodel_version = "None"
+version = "2.7.0"
+
+
+# Shared base for every generated model in this module: permissive pydantic v2
+# config (extra="allow", validation on assignment) plus the NWB bookkeeping
+# fields (hdf5_path, object_id) that locate the object inside an NWB file.
+class ConfiguredBaseModel(BaseModel):
+    model_config = ConfigDict(
+        validate_assignment=True,
+        validate_default=True,
+        extra="allow",
+        arbitrary_types_allowed=True,
+        use_enum_values=True,
+    )
+    hdf5_path: Optional[str] = Field(
+        None, description="The absolute path that this object is stored in an NWB file"
+    )
+
+    object_id: Optional[str] = Field(None, description="Unique UUID for each object")
+
+    # Index/slice delegate to the wrapped 'array' field when the model has one.
+    # NOTE(review): pydantic's BaseModel defines no __getitem__/__setitem__, so
+    # the super() fallback presumably raises AttributeError for models without
+    # an 'array' attribute — confirm this is the intended behavior upstream.
+    def __getitem__(self, i: slice | int) -> "np.ndarray":
+        if hasattr(self, "array"):
+            return self.array[i]
+        else:
+            return super().__getitem__(i)
+
+    def __setitem__(self, i: slice | int, value: Any):
+        if hasattr(self, "array"):
+            self.array[i] = value
+        else:
+            super().__setitem__(i, value)
+
+
+class LinkML_Meta(BaseModel):
+    """Extra LinkML Metadata stored as a class attribute"""
+
+    # True only for classes at the root of the generated schema tree.
+    tree_root: bool = False
+
+
+class PatchClampSeries(TimeSeries):
+    """
+    An abstract base class for patch-clamp data - stimulus or response, current or voltage.
+    """
+
+    linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
+    name: str = Field(...)
+ stimulus_description: Optional[str] = Field( + None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + ) + sweep_number: Optional[int] = Field( + None, + description="""Sweep number, allows to group different PatchClampSeries together.""", + ) + data: str = Field(..., description="""Recorded voltage or current.""") + gain: Optional[float] = Field( + None, + description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. 
If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class PatchClampSeriesData(ConfiguredBaseModel): + """ + Recorded voltage or current. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["data"] = Field("data") + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + array: Optional[NDArray[Shape["* num_times"], float]] = Field(None) + + +class CurrentClampSeries(PatchClampSeries): + """ + Voltage data from an intracellular current-clamp recording. A corresponding CurrentClampStimulusSeries (stored separately as a stimulus) is used to store the current injected. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) 
+ data: str = Field(..., description="""Recorded voltage.""") + bias_current: Optional[float] = Field( + None, description="""Bias current, in amps.""" + ) + bridge_balance: Optional[float] = Field( + None, description="""Bridge balance, in ohms.""" + ) + capacitance_compensation: Optional[float] = Field( + None, description="""Capacitance compensation, in farads.""" + ) + stimulus_description: Optional[str] = Field( + None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + ) + sweep_number: Optional[int] = Field( + None, + description="""Sweep number, allows to group different PatchClampSeries together.""", + ) + gain: Optional[float] = Field( + None, + description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class CurrentClampSeriesData(ConfiguredBaseModel): + """ + Recorded voltage. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["data"] = Field("data") + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + value: Any = Field(...) + + +class IZeroClampSeries(CurrentClampSeries): + """ + Voltage data from an intracellular recording when all current and amplifier settings are off (i.e., CurrentClampSeries fields will be zero). There is no CurrentClampStimulusSeries associated with an IZero series because the amplifier is disconnected and no stimulus can reach the cell. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) 
+ stimulus_description: Optional[str] = Field( + None, + description="""An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"""", + ) + bias_current: float = Field( + ..., description="""Bias current, in amps, fixed to 0.0.""" + ) + bridge_balance: float = Field( + ..., description="""Bridge balance, in ohms, fixed to 0.0.""" + ) + capacitance_compensation: float = Field( + ..., description="""Capacitance compensation, in farads, fixed to 0.0.""" + ) + data: str = Field(..., description="""Recorded voltage.""") + sweep_number: Optional[int] = Field( + None, + description="""Sweep number, allows to group different PatchClampSeries together.""", + ) + gain: Optional[float] = Field( + None, + description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class CurrentClampStimulusSeries(PatchClampSeries): + """ + Stimulus current applied during current clamp recording. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + data: str = Field(..., description="""Stimulus current applied.""") + stimulus_description: Optional[str] = Field( + None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + ) + sweep_number: Optional[int] = Field( + None, + description="""Sweep number, allows to group different PatchClampSeries together.""", + ) + gain: Optional[float] = Field( + None, + description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class CurrentClampStimulusSeriesData(ConfiguredBaseModel): + """ + Stimulus current applied. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["data"] = Field("data") + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + value: Any = Field(...) + + +class VoltageClampSeries(PatchClampSeries): + """ + Current data from an intracellular voltage-clamp recording. A corresponding VoltageClampStimulusSeries (stored separately as a stimulus) is used to store the voltage injected. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + data: str = Field(..., description="""Recorded current.""") + capacitance_fast: Optional[str] = Field( + None, description="""Fast capacitance, in farads.""" + ) + capacitance_slow: Optional[str] = Field( + None, description="""Slow capacitance, in farads.""" + ) + resistance_comp_bandwidth: Optional[str] = Field( + None, description="""Resistance compensation bandwidth, in hertz.""" + ) + resistance_comp_correction: Optional[str] = Field( + None, description="""Resistance compensation correction, in percent.""" + ) + resistance_comp_prediction: Optional[str] = Field( + None, description="""Resistance compensation prediction, in percent.""" + ) + whole_cell_capacitance_comp: Optional[str] = Field( + None, description="""Whole cell capacitance compensation, in farads.""" + ) + whole_cell_series_resistance_comp: Optional[str] = Field( + None, description="""Whole cell series resistance compensation, in ohms.""" + ) + stimulus_description: Optional[str] = Field( + None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + ) + sweep_number: Optional[int] = Field( + None, + description="""Sweep number, allows to group different PatchClampSeries together.""", + ) + gain: 
Optional[float] = Field( + None, + description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. 
This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""",
+    )
+
+
+class VoltageClampSeriesData(ConfiguredBaseModel):
+    """
+    Recorded current.
+    """
+
+    linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
+    name: Literal["data"] = Field("data")
+    unit: Optional[str] = Field(
+        None,
+        description="""Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""",
+    )
+    value: Any = Field(...)
+
+
+class VoltageClampSeriesCapacitanceFast(ConfiguredBaseModel):
+    """
+    Fast capacitance, in farads.
+    """
+
+    linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
+    name: Literal["capacitance_fast"] = Field("capacitance_fast")
+    unit: Optional[str] = Field(
+        None,
+        description="""Unit of measurement for capacitance_fast, which is fixed to 'farads'.""",
+    )
+    value: float = Field(...)
+
+
+class VoltageClampSeriesCapacitanceSlow(ConfiguredBaseModel):
+    """
+    Slow capacitance, in farads.
+    """
+
+    linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True)
+    name: Literal["capacitance_slow"] = Field("capacitance_slow")
+    unit: Optional[str] = Field(
+        None,
+        # Fixed copy-paste typo: upstream nwb-schema describes this attribute as
+        # "capacitance_fast"; this is the capacitance_slow unit documentation.
+        description="""Unit of measurement for capacitance_slow, which is fixed to 'farads'.""",
+    )
+    value: float = Field(...)
+
+
+class VoltageClampSeriesResistanceCompBandwidth(ConfiguredBaseModel):
+    """
+    Resistance compensation bandwidth, in hertz.
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["resistance_comp_bandwidth"] = Field("resistance_comp_bandwidth") + unit: Optional[str] = Field( + None, + description="""Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'.""", + ) + value: float = Field(...) + + +class VoltageClampSeriesResistanceCompCorrection(ConfiguredBaseModel): + """ + Resistance compensation correction, in percent. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["resistance_comp_correction"] = Field("resistance_comp_correction") + unit: Optional[str] = Field( + None, + description="""Unit of measurement for resistance_comp_correction, which is fixed to 'percent'.""", + ) + value: float = Field(...) + + +class VoltageClampSeriesResistanceCompPrediction(ConfiguredBaseModel): + """ + Resistance compensation prediction, in percent. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["resistance_comp_prediction"] = Field("resistance_comp_prediction") + unit: Optional[str] = Field( + None, + description="""Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'.""", + ) + value: float = Field(...) + + +class VoltageClampSeriesWholeCellCapacitanceComp(ConfiguredBaseModel): + """ + Whole cell capacitance compensation, in farads. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["whole_cell_capacitance_comp"] = Field("whole_cell_capacitance_comp") + unit: Optional[str] = Field( + None, + description="""Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'.""", + ) + value: float = Field(...) + + +class VoltageClampSeriesWholeCellSeriesResistanceComp(ConfiguredBaseModel): + """ + Whole cell series resistance compensation, in ohms. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["whole_cell_series_resistance_comp"] = Field( + "whole_cell_series_resistance_comp" + ) + unit: Optional[str] = Field( + None, + description="""Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'.""", + ) + value: float = Field(...) + + +class VoltageClampStimulusSeries(PatchClampSeries): + """ + Stimulus voltage applied during a voltage clamp recording. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + data: str = Field(..., description="""Stimulus voltage applied.""") + stimulus_description: Optional[str] = Field( + None, description="""Protocol/stimulus name for this patch-clamp dataset.""" + ) + sweep_number: Optional[int] = Field( + None, + description="""Sweep number, allows to group different PatchClampSeries together.""", + ) + gain: Optional[float] = Field( + None, + description="""Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class VoltageClampStimulusSeriesData(ConfiguredBaseModel): + """ + Stimulus voltage applied. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["data"] = Field("data") + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.""", + ) + value: Any = Field(...) + + +class IntracellularElectrode(NWBContainer): + """ + An intracellular electrode and its metadata. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + cell_id: Optional[str] = Field(None, description="""unique ID of the cell""") + description: str = Field( + ..., + description="""Description of electrode (e.g., whole-cell, sharp, etc.).""", + ) + filtering: Optional[str] = Field( + None, description="""Electrode specific filtering.""" + ) + initial_access_resistance: Optional[str] = Field( + None, description="""Initial access resistance.""" + ) + location: Optional[str] = Field( + None, + description="""Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", + ) + resistance: Optional[str] = Field( + None, description="""Electrode resistance, in ohms.""" + ) + seal: Optional[str] = Field( + None, description="""Information about seal used for recording.""" + ) + slice: Optional[str] = Field( + None, description="""Information about slice used for recording.""" + ) + + +class SweepTable(DynamicTable): + """ + [DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable, and ExperimentalConditions tables provide enhanced support for experiment metadata. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) 
+ sweep_number: Optional[List[int] | int] = Field( + default_factory=list, + description="""Sweep number of the PatchClampSeries in that row.""", + ) + series: Optional[List[str] | str] = Field( + default_factory=list, + description="""The PatchClampSeries with the sweep number in that row.""", + ) + series_index: str = Field(..., description="""Index for series.""") + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class SweepTableSeriesIndex(VectorIndex): + """ + Index for series. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["series_index"] = Field("series_index") + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class IntracellularElectrodesTable(DynamicTable): + """ + Table for storing intracellular electrode related metadata. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) 
+ description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + electrode: Optional[List[str] | str] = Field( + default_factory=list, + description="""Column for storing the reference to the intracellular electrode.""", + ) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class IntracellularStimuliTable(DynamicTable): + """ + Table for storing intracellular stimulus related metadata. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + stimulus: str = Field( + ..., + description="""Column storing the reference to the recorded stimulus for the recording (rows).""", + ) + stimulus_template: Optional[str] = Field( + None, + description="""Column storing the reference to the stimulus template for the recording (rows).""", + ) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class IntracellularStimuliTableStimulus(TimeSeriesReferenceVectorData): + """ + Column storing the reference to the recorded stimulus for the recording (rows). + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["stimulus"] = Field("stimulus") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class IntracellularStimuliTableStimulusTemplate(TimeSeriesReferenceVectorData): + """ + Column storing the reference to the stimulus template for the recording (rows). + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["stimulus_template"] = Field("stimulus_template") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. 
The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class IntracellularResponsesTable(DynamicTable): + """ + Table for storing intracellular response related metadata. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + response: str = Field( + ..., + description="""Column storing the reference to the recorded response for the recording (rows)""", + ) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class IntracellularResponsesTableResponse(TimeSeriesReferenceVectorData): + """ + Column storing the reference to the recorded response for the recording (rows) + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["response"] = Field("response") + idx_start: int = Field( + ..., + description="""Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time.""", + ) + count: int = Field( + ..., + description="""Number of data samples available in this time series, during this epoch""", + ) + timeseries: str = Field( + ..., description="""The TimeSeries that this index applies to""" + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class IntracellularRecordingsTable(AlignedDynamicTable): + """ + A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response is recorded as part of an experiment. 
In this case, both the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: Literal["intracellular_recordings"] = Field("intracellular_recordings") + description: Optional[str] = Field( + None, + description="""Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute.""", + ) + electrodes: str = Field( + ..., + description="""Table for storing intracellular electrode related metadata.""", + ) + stimuli: str = Field( + ..., + description="""Table for storing intracellular stimulus related metadata.""", + ) + responses: str = Field( + ..., + description="""Table for storing intracellular response related metadata.""", + ) + children: Optional[ + List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable] + ] = Field(default_factory=dict) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class SimultaneousRecordingsTable(DynamicTable): + """ + A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: Literal["simultaneous_recordings"] = Field("simultaneous_recordings") + recordings: str = Field( + ..., + description="""A reference to one or more rows in the IntracellularRecordingsTable table.""", + ) + recordings_index: str = Field( + ..., description="""Index dataset for the recordings column.""" + ) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class SimultaneousRecordingsTableRecordings(DynamicTableRegion): + """ + A reference to one or more rows in the IntracellularRecordingsTable table. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["recordings"] = Field("recordings") + table: Optional[str] = Field( + None, + description="""Reference to the IntracellularRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", + ) + description: Optional[str] = Field( + None, description="""Description of what this table region points to.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class SimultaneousRecordingsTableRecordingsIndex(VectorIndex): + """ + Index dataset for the recordings column. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["recordings_index"] = Field("recordings_index") + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class SequentialRecordingsTable(DynamicTable): + """ + A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where a sequence of stimuli of the same type with varying parameters have been presented in a sequence. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: Literal["sequential_recordings"] = Field("sequential_recordings") + simultaneous_recordings: str = Field( + ..., + description="""A reference to one or more rows in the SimultaneousRecordingsTable table.""", + ) + simultaneous_recordings_index: str = Field( + ..., description="""Index dataset for the simultaneous_recordings column.""" + ) + stimulus_type: Optional[List[str] | str] = Field( + default_factory=list, + description="""The type of stimulus used for the sequential recording.""", + ) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class SequentialRecordingsTableSimultaneousRecordings(DynamicTableRegion): + """ + A reference to one or more rows in the SimultaneousRecordingsTable table. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["simultaneous_recordings"] = Field("simultaneous_recordings") + table: Optional[str] = Field( + None, + description="""Reference to the SimultaneousRecordingsTable table that this table region applies to. 
This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", + ) + description: Optional[str] = Field( + None, description="""Description of what this table region points to.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class SequentialRecordingsTableSimultaneousRecordingsIndex(VectorIndex): + """ + Index dataset for the simultaneous_recordings column. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["simultaneous_recordings_index"] = Field( + "simultaneous_recordings_index" + ) + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class RepetitionsTable(DynamicTable): + """ + A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: Literal["repetitions"] = Field("repetitions") + sequential_recordings: str = Field( + ..., + description="""A reference to one or more rows in the SequentialRecordingsTable table.""", + ) + sequential_recordings_index: str = Field( + ..., description="""Index dataset for the sequential_recordings column.""" + ) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class RepetitionsTableSequentialRecordings(DynamicTableRegion): + """ + A reference to one or more rows in the SequentialRecordingsTable table. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["sequential_recordings"] = Field("sequential_recordings") + table: Optional[str] = Field( + None, + description="""Reference to the SequentialRecordingsTable table that this table region applies to. 
This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", + ) + description: Optional[str] = Field( + None, description="""Description of what this table region points to.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class RepetitionsTableSequentialRecordingsIndex(VectorIndex): + """ + Index dataset for the sequential_recordings column. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["sequential_recordings_index"] = Field("sequential_recordings_index") + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class ExperimentalConditionsTable(DynamicTable): + """ + A table for grouping different intracellular recording repetitions together that belong to the same experimental condition. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: Literal["experimental_conditions"] = Field("experimental_conditions") + repetitions: str = Field( + ..., + description="""A reference to one or more rows in the RepetitionsTable table.""", + ) + repetitions_index: str = Field( + ..., description="""Index dataset for the repetitions column.""" + ) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class ExperimentalConditionsTableRepetitions(DynamicTableRegion): + """ + A reference to one or more rows in the RepetitionsTable table. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["repetitions"] = Field("repetitions") + table: Optional[str] = Field( + None, + description="""Reference to the RepetitionsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here.""", + ) + description: Optional[str] = Field( + None, description="""Description of what this table region points to.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class ExperimentalConditionsTableRepetitionsIndex(VectorIndex): + """ + Index dataset for the repetitions column. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["repetitions_index"] = Field("repetitions_index") + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +PatchClampSeries.model_rebuild() +PatchClampSeriesData.model_rebuild() +CurrentClampSeries.model_rebuild() +CurrentClampSeriesData.model_rebuild() +IZeroClampSeries.model_rebuild() +CurrentClampStimulusSeries.model_rebuild() +CurrentClampStimulusSeriesData.model_rebuild() +VoltageClampSeries.model_rebuild() +VoltageClampSeriesData.model_rebuild() +VoltageClampSeriesCapacitanceFast.model_rebuild() +VoltageClampSeriesCapacitanceSlow.model_rebuild() +VoltageClampSeriesResistanceCompBandwidth.model_rebuild() +VoltageClampSeriesResistanceCompCorrection.model_rebuild() +VoltageClampSeriesResistanceCompPrediction.model_rebuild() +VoltageClampSeriesWholeCellCapacitanceComp.model_rebuild() +VoltageClampSeriesWholeCellSeriesResistanceComp.model_rebuild() +VoltageClampStimulusSeries.model_rebuild() +VoltageClampStimulusSeriesData.model_rebuild() +IntracellularElectrode.model_rebuild() +SweepTable.model_rebuild() +SweepTableSeriesIndex.model_rebuild() +IntracellularElectrodesTable.model_rebuild() +IntracellularStimuliTable.model_rebuild() +IntracellularStimuliTableStimulus.model_rebuild() +IntracellularStimuliTableStimulusTemplate.model_rebuild() +IntracellularResponsesTable.model_rebuild() +IntracellularResponsesTableResponse.model_rebuild() 
+IntracellularRecordingsTable.model_rebuild() +SimultaneousRecordingsTable.model_rebuild() +SimultaneousRecordingsTableRecordings.model_rebuild() +SimultaneousRecordingsTableRecordingsIndex.model_rebuild() +SequentialRecordingsTable.model_rebuild() +SequentialRecordingsTableSimultaneousRecordings.model_rebuild() +SequentialRecordingsTableSimultaneousRecordingsIndex.model_rebuild() +RepetitionsTable.model_rebuild() +RepetitionsTableSequentialRecordings.model_rebuild() +RepetitionsTableSequentialRecordingsIndex.model_rebuild() +ExperimentalConditionsTable.model_rebuild() +ExperimentalConditionsTableRepetitions.model_rebuild() +ExperimentalConditionsTableRepetitionsIndex.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py new file mode 100644 index 0000000..9931d79 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_image.py @@ -0,0 +1,402 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from .core_nwb_base import TimeSeriesStartingTime, TimeSeriesSync, Image, TimeSeries + + +metamodel_version = "None" +version = "2.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config 
= ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class GrayscaleImage(Image): + """ + A grayscale image. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + resolution: Optional[float] = Field( + None, description="""Pixel resolution of the image, in pixels per centimeter.""" + ) + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r, g, b"], float], + NDArray[Shape["* x, * y, 4 r, g, b, a"], float], + ] + ] = Field(None) + + +class RGBImage(Image): + """ + A color image. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) 
+ resolution: Optional[float] = Field( + None, description="""Pixel resolution of the image, in pixels per centimeter.""" + ) + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r, g, b"], float], + NDArray[Shape["* x, * y, 4 r, g, b, a"], float], + ] + ] = Field(None) + + +class RGBAImage(Image): + """ + A color image with transparency. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + resolution: Optional[float] = Field( + None, description="""Pixel resolution of the image, in pixels per centimeter.""" + ) + description: Optional[str] = Field( + None, description="""Description of the image.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* x, * y"], float], + NDArray[Shape["* x, * y, 3 r, g, b"], float], + NDArray[Shape["* x, * y, 4 r, g, b, a"], float], + ] + ] = Field(None) + + +class ImageSeries(TimeSeries): + """ + General image data that is common between acquisition and stimulus time series. Sometimes the image data is stored in the file in a raw format while other times it will be stored as a series of external image files in the host file system. The data field will either be binary data, if the data is stored in the NWB file, or empty, if the data is stored in an external image stack. [frame][x][y] or [frame][x][y][z]. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + data: Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], + ] = Field( + ..., + description="""Binary data representing images across frames. 
If data are stored in an external file, this should be an empty 3D array.""", + ) + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( + None, description="""Number of pixels on x, y, (and z) axes.""" + ) + external_file: Optional[str] = Field( + None, + description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", + ) + format: Optional[str] = Field( + None, + description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class ImageSeriesExternalFile(ConfiguredBaseModel): + """ + Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["external_file"] = Field("external_file") + starting_frame: Optional[int] = Field( + None, + description="""Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].""", + ) + array: Optional[NDArray[Shape["* num_files"], str]] = Field(None) + + +class ImageMaskSeries(ImageSeries): + """ + An alpha mask that is applied to a presented visual stimulus. The 'data' array contains an array of mask values that are applied to the displayed image. Mask values are stored as RGBA. Mask can vary with time. The timestamps array indicates the starting time of a mask, and that mask pattern continues until it's explicitly changed. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + data: Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], + ] = Field( + ..., + description="""Binary data representing images across frames. 
If data are stored in an external file, this should be an empty 3D array.""", + ) + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( + None, description="""Number of pixels on x, y, (and z) axes.""" + ) + external_file: Optional[str] = Field( + None, + description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", + ) + format: Optional[str] = Field( + None, + description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class OpticalSeries(ImageSeries): + """ + Image data that is presented or recorded. A stimulus template movie will be stored only as an image. When the image is presented as stimulus, additional data is required, such as field of view (e.g., how much of the visual field the image covers, or how what is the area of the target being imaged). If the OpticalSeries represents acquired imaging data, orientation is also important. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + distance: Optional[float] = Field( + None, description="""Distance from camera/monitor to target/eye.""" + ) + field_of_view: Optional[ + Union[ + NDArray[Shape["2 width, height"], float], + NDArray[Shape["3 width, height, depth"], float], + ] + ] = Field( + None, + description="""Width, height and depth of image, or imaged area, in meters.""", + ) + data: Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, 3 r, g, b"], float], + ] = Field( + ..., description="""Images presented to subject, either grayscale or RGB""" + ) + orientation: Optional[str] = Field( + None, + description="""Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.""", + ) + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( + None, description="""Number of pixels on x, y, (and z) axes.""" + ) + external_file: Optional[str] = Field( + None, + description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", + ) + format: Optional[str] = Field( + None, + description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class IndexSeries(TimeSeries): + """ + Stores indices to image frames stored in an ImageSeries. 
The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + data: NDArray[Shape["* num_times"], int] = Field( + ..., + description="""Index of the image (using zero-indexing) in the linked Images object.""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +GrayscaleImage.model_rebuild() +RGBImage.model_rebuild() +RGBAImage.model_rebuild() +ImageSeries.model_rebuild() +ImageSeriesExternalFile.model_rebuild() +ImageMaskSeries.model_rebuild() +OpticalSeries.model_rebuild() +IndexSeries.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_language.py new file mode 100644 index 0000000..f3b68d9 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_language.py @@ -0,0 +1,94 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, 
+ Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +metamodel_version = "None" +version = "None" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py new file mode 100644 index 0000000..18ed3da --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_misc.py @@ -0,0 +1,631 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + 
Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from ...hdmf_common.v1_8_0.hdmf_common_table import ( + DynamicTableRegion, + VectorIndex, + VectorData, + DynamicTable, +) + +from .core_nwb_ecephys import ElectrodeGroup + +from .core_nwb_base import TimeSeriesStartingTime, TimeSeries, TimeSeriesSync + + +metamodel_version = "None" +version = "2.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class AbstractFeatureSeries(TimeSeries): + """ + Abstract features, such as quantitative descriptions of sensory stimuli. The TimeSeries::data field is a 2D array, storing those features (e.g., for visual grating stimulus this might be orientation, spatial frequency and contrast). 
Null stimuli (eg, uniform gray) can be marked as being an independent feature (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, or through use of the TimeSeries::control fields. A set of features is considered to persist until the next set of features is defined. The final set of features stored should be the null set. This is useful when storing the raw stimulus is impractical. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + data: str = Field(..., description="""Values of each feature at each time.""") + feature_units: Optional[NDArray[Shape["* num_features"], str]] = Field( + None, description="""Units of each feature.""" + ) + features: NDArray[Shape["* num_features"], str] = Field( + ..., + description="""Description of the features represented in TimeSeries::data.""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. 
When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class AbstractFeatureSeriesData(ConfiguredBaseModel): + """ + Values of each feature at each time. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["data"] = Field("data") + unit: Optional[str] = Field( + None, + description="""Since there can be different units for different features, store the units in 'feature_units'. 
The default value for this attribute is \"see 'feature_units'\".""", + ) + array: Optional[ + Union[ + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_features"], float], + ] + ] = Field(None) + + +class AnnotationSeries(TimeSeries): + """ + Stores user annotations made during an experiment. The data[] field stores a text array, and timestamps are stored for each annotation (ie, interval=1). This is largely an alias to a standard TimeSeries storing a text array but that is identifiable as storing annotations in a machine-readable way. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + data: NDArray[Shape["* num_times"], str] = Field( + ..., description="""Annotations made during an experiment.""" + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class IntervalSeries(TimeSeries): + """ + Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + data: NDArray[Shape["* num_times"], int] = Field( + ..., description="""Use values >0 if interval started, <0 if interval ended.""" + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class DecompositionSeries(TimeSeries): + """ + Spectral analysis of a time series, e.g. of an LFP or a speech signal. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + data: str = Field(..., description="""Data decomposed into frequency bands.""") + metric: str = Field( + ..., description="""The metric used, e.g. phase, amplitude, power.""" + ) + source_channels: Optional[str] = Field( + None, + description="""DynamicTableRegion pointer to the channels that this decomposition series was generated from.""", + ) + bands: str = Field( + ..., + description="""Table for describing the bands that this series was generated from. There should be one row in this table for each band.""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class DecompositionSeriesData(ConfiguredBaseModel): + """ + Data decomposed into frequency bands. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["data"] = Field("data") + unit: Optional[str] = Field( + None, + description="""Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'.""", + ) + array: Optional[ + NDArray[Shape["* num_times, * num_channels, * num_bands"], float] + ] = Field(None) + + +class DecompositionSeriesSourceChannels(DynamicTableRegion): + """ + DynamicTableRegion pointer to the channels that this decomposition series was generated from. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["source_channels"] = Field("source_channels") + table: Optional[str] = Field( + None, + description="""Reference to the DynamicTable object that this region applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what this table region points to.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class DecompositionSeriesBands(DynamicTable): + """ + Table for describing the bands that this series was generated from. There should be one row in this table for each band. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["bands"] = Field("bands") + band_name: Optional[List[str] | str] = Field( + default_factory=list, description="""Name of the band, e.g. theta.""" + ) + band_limits: NDArray[Shape["* num_bands, 2 low, high"], float] = Field( + ..., + description="""Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center.""", + ) + band_mean: NDArray[Shape["* num_bands"], float] = Field( + ..., description="""The mean Gaussian filters, in Hz.""" + ) + band_stdev: NDArray[Shape["* num_bands"], float] = Field( + ..., description="""The standard deviation of Gaussian filters, in Hz.""" + ) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. 
This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class Units(DynamicTable): + """ + Data about spiking units. Event times of observed units (e.g. cell, synapse, etc.) should be concatenated and stored in spike_times. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field("Units") + spike_times_index: Optional[str] = Field( + None, description="""Index into the spike_times dataset.""" + ) + spike_times: Optional[str] = Field( + None, description="""Spike times for each unit in seconds.""" + ) + obs_intervals_index: Optional[str] = Field( + None, description="""Index into the obs_intervals dataset.""" + ) + obs_intervals: Optional[NDArray[Shape["* num_intervals, 2 start|end"], float]] = ( + Field(None, description="""Observation intervals for each unit.""") + ) + electrodes_index: Optional[str] = Field( + None, description="""Index into electrodes.""" + ) + electrodes: Optional[str] = Field( + None, + description="""Electrode that each spike unit came from, specified using a DynamicTableRegion.""", + ) + electrode_group: Optional[List[str] | str] = Field( + default_factory=list, + description="""Electrode group that each spike unit came from.""", + ) + waveform_mean: Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] + ] = Field(None, description="""Spike waveform mean for each spike unit.""") + waveform_sd: Optional[ + Union[ + NDArray[Shape["* num_units, * num_samples"], 
float], + NDArray[Shape["* num_units, * num_samples, * num_electrodes"], float], + ] + ] = Field( + None, description="""Spike waveform standard deviation for each spike unit.""" + ) + waveforms: Optional[NDArray[Shape["* num_waveforms, * num_samples"], float]] = ( + Field( + None, + description="""Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. 
The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.""", + ) + ) + waveforms_index: Optional[str] = Field( + None, + description="""Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.""", + ) + waveforms_index_index: Optional[str] = Field( + None, + description="""Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.""", + ) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class UnitsSpikeTimesIndex(VectorIndex): + """ + Index into the spike_times dataset. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["spike_times_index"] = Field("spike_times_index") + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsSpikeTimes(VectorData): + """ + Spike times for each unit in seconds. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["spike_times"] = Field("spike_times") + resolution: Optional[float] = Field( + None, + description="""The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsObsIntervalsIndex(VectorIndex): + """ + Index into the obs_intervals dataset. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["obs_intervals_index"] = Field("obs_intervals_index") + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsElectrodesIndex(VectorIndex): + """ + Index into electrodes. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["electrodes_index"] = Field("electrodes_index") + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsElectrodes(DynamicTableRegion): + """ + Electrode that each spike unit came from, specified using a DynamicTableRegion. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["electrodes"] = Field("electrodes") + table: Optional[str] = Field( + None, + description="""Reference to the DynamicTable object that this region applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what this table region points to.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsWaveformsIndex(VectorIndex): + """ + Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["waveforms_index"] = Field("waveforms_index") + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class UnitsWaveformsIndexIndex(VectorIndex): + """ + Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["waveforms_index_index"] = Field("waveforms_index_index") + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +AbstractFeatureSeries.model_rebuild() +AbstractFeatureSeriesData.model_rebuild() +AnnotationSeries.model_rebuild() +IntervalSeries.model_rebuild() +DecompositionSeries.model_rebuild() +DecompositionSeriesData.model_rebuild() +DecompositionSeriesSourceChannels.model_rebuild() +DecompositionSeriesBands.model_rebuild() +Units.model_rebuild() +UnitsSpikeTimesIndex.model_rebuild() +UnitsSpikeTimes.model_rebuild() +UnitsObsIntervalsIndex.model_rebuild() +UnitsElectrodesIndex.model_rebuild() +UnitsElectrodes.model_rebuild() +UnitsWaveformsIndex.model_rebuild() +UnitsWaveformsIndexIndex.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py new file mode 100644 index 0000000..df23c11 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ogen.py @@ -0,0 +1,164 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + 
Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from .core_nwb_base import ( + NWBContainer, + TimeSeries, + TimeSeriesStartingTime, + TimeSeriesSync, +) + + +metamodel_version = "None" +version = "2.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class OptogeneticSeries(TimeSeries): + """ + An optogenetic stimulus. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + data: Union[ + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_rois"], float], + ] = Field( + ..., + description="""Applied power for optogenetic stimulus, in watts. Shape can be 1D or 2D. 
2D data is meant to be used in an extension of OptogeneticSeries that defines what the second dimension represents.""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. 
This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class OptogeneticStimulusSite(NWBContainer): + """ + A site of optogenetic stimulation. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + description: str = Field(..., description="""Description of stimulation site.""") + excitation_lambda: float = Field( + ..., description="""Excitation wavelength, in nm.""" + ) + location: str = Field( + ..., + description="""Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.""", + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +OptogeneticSeries.model_rebuild() +OptogeneticStimulusSite.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py new file mode 100644 index 0000000..43f8486 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_ophys.py @@ -0,0 +1,575 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray 
+import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from .core_nwb_image import ImageSeriesExternalFile, ImageSeries + +from ...hdmf_common.v1_8_0.hdmf_common_table import ( + DynamicTableRegion, + VectorData, + VectorIndex, + DynamicTable, +) + +from .core_nwb_base import ( + TimeSeriesStartingTime, + NWBContainer, + TimeSeries, + NWBDataInterface, + TimeSeriesSync, +) + + +metamodel_version = "None" +version = "2.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class OnePhotonSeries(ImageSeries): + """ + Image stack recorded over time from 1-photon microscope. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field( + None, + description="""Lines imaged per second. 
This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", + ) + exposure_time: Optional[float] = Field( + None, + description="""Exposure time of the sample; often the inverse of the frequency.""", + ) + binning: Optional[int] = Field( + None, + description="""Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.""", + ) + power: Optional[float] = Field( + None, description="""Power of the excitation in mW, if known.""" + ) + intensity: Optional[float] = Field( + None, description="""Intensity of the excitation in mW/mm^2, if known.""" + ) + data: Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], + ] = Field( + ..., + description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", + ) + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( + None, description="""Number of pixels on x, y, (and z) axes.""" + ) + external_file: Optional[str] = Field( + None, + description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", + ) + format: Optional[str] = Field( + None, + description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. 
Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class TwoPhotonSeries(ImageSeries): + """ + Image stack recorded over time from 2-photon microscope. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + pmt_gain: Optional[float] = Field(None, description="""Photomultiplier gain.""") + scan_line_rate: Optional[float] = Field( + None, + description="""Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.""", + ) + field_of_view: Optional[ + Union[ + NDArray[Shape["2 width|height"], float], + NDArray[Shape["3 width|height|depth"], float], + ] + ] = Field( + None, + description="""Width, height and depth of image, or imaged area, in meters.""", + ) + data: Union[ + NDArray[Shape["* frame, * x, * y"], float], + NDArray[Shape["* frame, * x, * y, * z"], float], + ] = Field( + ..., + description="""Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array.""", + ) + dimension: Optional[NDArray[Shape["* rank"], int]] = Field( + None, description="""Number of pixels on x, y, (and z) axes.""" + ) + external_file: Optional[str] = Field( + None, + description="""Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.""", + ) + format: Optional[str] = Field( + None, + description="""Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. 
If this attribute is not present, then the default format='raw' case is assumed.""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. 
Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class RoiResponseSeries(TimeSeries): + """ + ROI responses over an imaging plane. The first dimension represents time. The second dimension, if present, represents ROIs. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + data: Union[ + NDArray[Shape["* num_times"], float], + NDArray[Shape["* num_times, * num_ROIs"], float], + ] = Field(..., description="""Signals from ROIs.""") + rois: str = Field( + ..., + description="""DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries.""", + ) + description: Optional[str] = Field( + None, description="""Description of the time series.""" + ) + comments: Optional[str] = Field( + None, + description="""Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.""", + ) + starting_time: Optional[str] = Field( + None, + description="""Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.""", + ) + timestamps: Optional[NDArray[Shape["* num_times"], float]] = Field( + None, + description="""Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.""", + ) + control: Optional[NDArray[Shape["* num_times"], int]] = Field( + None, + description="""Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data.""", + ) + control_description: Optional[NDArray[Shape["* num_control_values"], str]] = Field( + None, + description="""Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.""", + ) + sync: Optional[str] = Field( + None, + description="""Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.""", + ) + + +class RoiResponseSeriesRois(DynamicTableRegion): + """ + DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["rois"] = Field("rois") + table: Optional[str] = Field( + None, + description="""Reference to the DynamicTable object that this region applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what this table region points to.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class DfOverF(NWBDataInterface): + """ + dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes). 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) + name: str = Field(...) + + +class Fluorescence(NWBDataInterface): + """ + Fluorescence information about a region of interest (ROI). Storage hierarchy of fluorescence should be the same as for segmentation (ie, same names for ROIs and for image planes). + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[RoiResponseSeries] | RoiResponseSeries] = Field( + default_factory=dict + ) + name: str = Field(...) + + +class ImageSegmentation(NWBDataInterface): + """ + Stores pixels in an image that represent different regions of interest (ROIs) or masks. All segmentation for a given imaging plane is stored together, with storage for multiple imaging planes (masks) supported. Each ROI is stored in its own subgroup, with the ROI group containing both a 2D mask and a list of pixels that make up this mask. Segments can also be used for masking neuropil. If segmentation is allowed to change with time, a new imaging plane (or module) is required and ROI names should remain consistent between them. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[PlaneSegmentation] | PlaneSegmentation] = Field( + default_factory=dict + ) + name: str = Field(...) + + +class PlaneSegmentation(DynamicTable): + """ + Results from image segmentation of a specific imaging plane. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + image_mask: Optional[ + Union[ + NDArray[Shape["* num_roi, * num_x, * num_y"], Any], + NDArray[Shape["* num_roi, * num_x, * num_y, * num_z"], Any], + ] + ] = Field( + None, + description="""ROI masks for each ROI. 
Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.""", + ) + pixel_mask_index: Optional[str] = Field( + None, description="""Index into pixel_mask.""" + ) + pixel_mask: Optional[str] = Field( + None, + description="""Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", + ) + voxel_mask_index: Optional[str] = Field( + None, description="""Index into voxel_mask.""" + ) + voxel_mask: Optional[str] = Field( + None, + description="""Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation""", + ) + reference_images: Optional[List[ImageSeries] | ImageSeries] = Field( + default_factory=dict, + description="""Image stacks that the segmentation masks apply to.""", + ) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class PlaneSegmentationPixelMaskIndex(VectorIndex): + """ + Index into pixel_mask. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["pixel_mask_index"] = Field("pixel_mask_index") + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class PlaneSegmentationPixelMask(VectorData): + """ + Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["pixel_mask"] = Field("pixel_mask") + x: Optional[int] = Field(None, description="""Pixel x-coordinate.""") + y: Optional[int] = Field(None, description="""Pixel y-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the pixel.""") + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class PlaneSegmentationVoxelMaskIndex(VectorIndex): + """ + Index into voxel_mask. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["voxel_mask_index"] = Field("voxel_mask_index") + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class PlaneSegmentationVoxelMask(VectorData): + """ + Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["voxel_mask"] = Field("voxel_mask") + x: Optional[int] = Field(None, description="""Voxel x-coordinate.""") + y: Optional[int] = Field(None, description="""Voxel y-coordinate.""") + z: Optional[int] = Field(None, description="""Voxel z-coordinate.""") + weight: Optional[float] = Field(None, description="""Weight of the voxel.""") + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class ImagingPlane(NWBContainer): + """ + An imaging plane and its metadata. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[OpticalChannel] | OpticalChannel] = Field( + default_factory=dict + ) + name: str = Field(...) 
+
+
+class OpticalChannel(NWBContainer):
+ """
+ An optical channel used to record from an imaging plane.
+ """
+
+ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
+ name: str = Field(...)
+ description: str = Field(
+ ..., description="""Description or other notes about the channel."""
+ )
+ emission_lambda: float = Field(
+ ..., description="""Emission wavelength for channel, in nm."""
+ )
+
+
+class MotionCorrection(NWBDataInterface):
+ """
+ An image stack where all frames are shifted (registered) to a common coordinate system, to account for movement and drift between frames. Note: each frame at each point in time is assumed to be 2-D (has only x & y dimensions).
+ """
+
+ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
+ children: Optional[List[CorrectedImageStack] | CorrectedImageStack] = Field(
+ default_factory=list
+ )
+ name: str = Field(...)
+
+
+class CorrectedImageStack(NWBDataInterface):
+ """
+ Results from motion correction of an image stack.
+ """
+
+ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True)
+ name: str = Field(...) 
+ corrected: str = Field( + ..., + description="""Image stack with frames shifted to the common coordinates.""", + ) + xy_translation: str = Field( + ..., + description="""Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.""", + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +OnePhotonSeries.model_rebuild() +TwoPhotonSeries.model_rebuild() +RoiResponseSeries.model_rebuild() +RoiResponseSeriesRois.model_rebuild() +DfOverF.model_rebuild() +Fluorescence.model_rebuild() +ImageSegmentation.model_rebuild() +PlaneSegmentation.model_rebuild() +PlaneSegmentationPixelMaskIndex.model_rebuild() +PlaneSegmentationPixelMask.model_rebuild() +PlaneSegmentationVoxelMaskIndex.model_rebuild() +PlaneSegmentationVoxelMask.model_rebuild() +ImagingPlane.model_rebuild() +OpticalChannel.model_rebuild() +MotionCorrection.model_rebuild() +CorrectedImageStack.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py new file mode 100644 index 0000000..f412900 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/core_nwb_retinotopy.py @@ -0,0 +1,292 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): 
+ from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from .core_nwb_base import NWBDataInterface + + +metamodel_version = "None" +version = "2.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class ImagingRetinotopy(NWBDataInterface): + """ + DEPRECATED. Intrinsic signal optical imaging or widefield imaging for measuring retinotopy. Stores orthogonal maps (e.g., altitude/azimuth; radius/theta) of responses to specific stimuli and a combined polarity map from which to identify visual areas. This group does not store the raw responses imaged during retinotopic mapping or the stimuli presented, but rather the resulting phase and power maps after applying a Fourier transform on the averaged responses. Note: for data consistency, all images and arrays are stored in the format [row][column] and [row, col], which equates to [y][x]. Field of view and dimension arrays may appear backward (i.e., y before x). 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field("ImagingRetinotopy") + axis_1_phase_map: str = Field( + ..., description="""Phase response to stimulus on the first measured axis.""" + ) + axis_1_power_map: Optional[str] = Field( + None, + description="""Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""", + ) + axis_2_phase_map: str = Field( + ..., description="""Phase response to stimulus on the second measured axis.""" + ) + axis_2_power_map: Optional[str] = Field( + None, + description="""Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.""", + ) + axis_descriptions: NDArray[Shape["2 axis_1, axis_2"], str] = Field( + ..., + description="""Two-element array describing the contents of the two response axis fields. Description should be something like ['altitude', 'azimuth'] or '['radius', 'theta'].""", + ) + focal_depth_image: Optional[str] = Field( + None, + description="""Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns].""", + ) + sign_map: Optional[str] = Field( + None, + description="""Sine of the angle between the direction of the gradient in axis_1 and axis_2.""", + ) + vasculature_image: str = Field( + ..., + description="""Gray-scale anatomical image of cortical surface. Array structure: [rows][columns]""", + ) + + +class ImagingRetinotopyAxis1PhaseMap(ConfiguredBaseModel): + """ + Phase response to stimulus on the first measured axis. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["axis_1_phase_map"] = Field("axis_1_phase_map") + dimension: Optional[int] = Field( + None, + description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", + ) + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) + unit: Optional[str] = Field( + None, description="""Unit that axis data is stored in (e.g., degrees).""" + ) + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) + + +class ImagingRetinotopyAxis1PowerMap(ConfiguredBaseModel): + """ + Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["axis_1_power_map"] = Field("axis_1_power_map") + dimension: Optional[int] = Field( + None, + description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", + ) + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) + unit: Optional[str] = Field( + None, description="""Unit that axis data is stored in (e.g., degrees).""" + ) + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) + + +class ImagingRetinotopyAxis2PhaseMap(ConfiguredBaseModel): + """ + Phase response to stimulus on the second measured axis. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["axis_2_phase_map"] = Field("axis_2_phase_map") + dimension: Optional[int] = Field( + None, + description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", + ) + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) + unit: Optional[str] = Field( + None, description="""Unit that axis data is stored in (e.g., degrees).""" + ) + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) + + +class ImagingRetinotopyAxis2PowerMap(ConfiguredBaseModel): + """ + Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["axis_2_power_map"] = Field("axis_2_power_map") + dimension: Optional[int] = Field( + None, + description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", + ) + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) + unit: Optional[str] = Field( + None, description="""Unit that axis data is stored in (e.g., degrees).""" + ) + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) + + +class ImagingRetinotopyFocalDepthImage(ConfiguredBaseModel): + """ + Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns]. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["focal_depth_image"] = Field("focal_depth_image") + bits_per_pixel: Optional[int] = Field( + None, + description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value.""", + ) + dimension: Optional[int] = Field( + None, + description="""Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width.""", + ) + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) + focal_depth: Optional[float] = Field( + None, description="""Focal depth offset, in meters.""" + ) + format: Optional[str] = Field( + None, description="""Format of image. Right now only 'raw' is supported.""" + ) + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(None) + + +class ImagingRetinotopySignMap(ConfiguredBaseModel): + """ + Sine of the angle between the direction of the gradient in axis_1 and axis_2. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["sign_map"] = Field("sign_map") + dimension: Optional[int] = Field( + None, + description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", + ) + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) + array: Optional[NDArray[Shape["* num_rows, * num_cols"], float]] = Field(None) + + +class ImagingRetinotopyVasculatureImage(ConfiguredBaseModel): + """ + Gray-scale anatomical image of cortical surface. Array structure: [rows][columns] + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["vasculature_image"] = Field("vasculature_image") + bits_per_pixel: Optional[int] = Field( + None, + description="""Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value""", + ) + dimension: Optional[int] = Field( + None, + description="""Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width.""", + ) + field_of_view: Optional[float] = Field( + None, description="""Size of viewing area, in meters.""" + ) + format: Optional[str] = Field( + None, description="""Format of image. 
Right now only 'raw' is supported.""" + ) + array: Optional[NDArray[Shape["* num_rows, * num_cols"], int]] = Field(None) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +ImagingRetinotopy.model_rebuild() +ImagingRetinotopyAxis1PhaseMap.model_rebuild() +ImagingRetinotopyAxis1PowerMap.model_rebuild() +ImagingRetinotopyAxis2PhaseMap.model_rebuild() +ImagingRetinotopyAxis2PowerMap.model_rebuild() +ImagingRetinotopyFocalDepthImage.model_rebuild() +ImagingRetinotopySignMap.model_rebuild() +ImagingRetinotopyVasculatureImage.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py new file mode 100644 index 0000000..05648b2 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/core/v2_7_0/namespace.py @@ -0,0 +1,292 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from ...hdmf_experimental.v0_5_0.hdmf_experimental_resources import ( + HERD, + HERDKeys, + HERDFiles, + HERDEntities, + HERDObjects, + HERDObjectKeys, + HERDEntityKeys, +) + +from ...hdmf_common.v1_8_0.hdmf_common_sparse import CSRMatrix + +from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container, 
SimpleMultiContainer + +from ...hdmf_common.v1_8_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + +from ...hdmf_experimental.v0_5_0.hdmf_experimental_experimental import EnumData + +from .core_nwb_retinotopy import ( + ImagingRetinotopy, + ImagingRetinotopyAxis1PhaseMap, + ImagingRetinotopyAxis1PowerMap, + ImagingRetinotopyAxis2PhaseMap, + ImagingRetinotopyAxis2PowerMap, + ImagingRetinotopyFocalDepthImage, + ImagingRetinotopySignMap, + ImagingRetinotopyVasculatureImage, +) + +from .core_nwb_base import ( + NWBData, + TimeSeriesReferenceVectorData, + Image, + ImageReferences, + NWBContainer, + NWBDataInterface, + TimeSeries, + TimeSeriesData, + TimeSeriesStartingTime, + TimeSeriesSync, + ProcessingModule, + Images, + ImagesOrderOfImages, +) + +from .core_nwb_ophys import ( + OnePhotonSeries, + TwoPhotonSeries, + RoiResponseSeries, + RoiResponseSeriesRois, + DfOverF, + Fluorescence, + ImageSegmentation, + PlaneSegmentation, + PlaneSegmentationPixelMaskIndex, + PlaneSegmentationPixelMask, + PlaneSegmentationVoxelMaskIndex, + PlaneSegmentationVoxelMask, + ImagingPlane, + OpticalChannel, + MotionCorrection, + CorrectedImageStack, +) + +from .core_nwb_device import Device + +from .core_nwb_image import ( + GrayscaleImage, + RGBImage, + RGBAImage, + ImageSeries, + ImageSeriesExternalFile, + ImageMaskSeries, + OpticalSeries, + IndexSeries, +) + +from .core_nwb_ogen import OptogeneticSeries, OptogeneticStimulusSite + +from .core_nwb_icephys import ( + PatchClampSeries, + PatchClampSeriesData, + CurrentClampSeries, + CurrentClampSeriesData, + IZeroClampSeries, + CurrentClampStimulusSeries, + CurrentClampStimulusSeriesData, + VoltageClampSeries, + VoltageClampSeriesData, + VoltageClampSeriesCapacitanceFast, + VoltageClampSeriesCapacitanceSlow, + VoltageClampSeriesResistanceCompBandwidth, + VoltageClampSeriesResistanceCompCorrection, + VoltageClampSeriesResistanceCompPrediction, + 
VoltageClampSeriesWholeCellCapacitanceComp, + VoltageClampSeriesWholeCellSeriesResistanceComp, + VoltageClampStimulusSeries, + VoltageClampStimulusSeriesData, + IntracellularElectrode, + SweepTable, + SweepTableSeriesIndex, + IntracellularElectrodesTable, + IntracellularStimuliTable, + IntracellularStimuliTableStimulus, + IntracellularStimuliTableStimulusTemplate, + IntracellularResponsesTable, + IntracellularResponsesTableResponse, + IntracellularRecordingsTable, + SimultaneousRecordingsTable, + SimultaneousRecordingsTableRecordings, + SimultaneousRecordingsTableRecordingsIndex, + SequentialRecordingsTable, + SequentialRecordingsTableSimultaneousRecordings, + SequentialRecordingsTableSimultaneousRecordingsIndex, + RepetitionsTable, + RepetitionsTableSequentialRecordings, + RepetitionsTableSequentialRecordingsIndex, + ExperimentalConditionsTable, + ExperimentalConditionsTableRepetitions, + ExperimentalConditionsTableRepetitionsIndex, +) + +from .core_nwb_ecephys import ( + ElectricalSeries, + ElectricalSeriesElectrodes, + SpikeEventSeries, + FeatureExtraction, + FeatureExtractionElectrodes, + EventDetection, + EventWaveform, + FilteredEphys, + LFP, + ElectrodeGroup, + ElectrodeGroupPosition, + ClusterWaveforms, + Clustering, +) + +from .core_nwb_behavior import ( + SpatialSeries, + SpatialSeriesData, + BehavioralEpochs, + BehavioralEvents, + BehavioralTimeSeries, + PupilTracking, + EyeTracking, + CompassDirection, + Position, +) + +from .core_nwb_misc import ( + AbstractFeatureSeries, + AbstractFeatureSeriesData, + AnnotationSeries, + IntervalSeries, + DecompositionSeries, + DecompositionSeriesData, + DecompositionSeriesSourceChannels, + DecompositionSeriesBands, + Units, + UnitsSpikeTimesIndex, + UnitsSpikeTimes, + UnitsObsIntervalsIndex, + UnitsElectrodesIndex, + UnitsElectrodes, + UnitsWaveformsIndex, + UnitsWaveformsIndexIndex, +) + +from .core_nwb_file import ( + ScratchData, + NWBFile, + NWBFileStimulus, + NWBFileGeneral, + NWBFileGeneralSourceScript, + 
NWBFileGeneralExtracellularEphys, + NWBFileGeneralExtracellularEphysElectrodes, + NWBFileGeneralIntracellularEphys, + LabMetaData, + Subject, + SubjectAge, +) + +from .core_nwb_epoch import ( + TimeIntervals, + TimeIntervalsTagsIndex, + TimeIntervalsTimeseries, + TimeIntervalsTimeseriesIndex, +) + + +metamodel_version = "None" +version = "2.7.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/__init__.py @@ -0,0 +1 
@@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_nwb_language.py index 4963581..f3b68d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_nwb_language.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_nwb_language.py @@ -1,20 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel): tree_root: bool = False -class Arraylike(ConfiguredBaseModel): - """ - Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. 
this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. - """ - - linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - - # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Arraylike.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py index 680ece2..46d3b76 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_sparse.py @@ -1,15 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, 
"array"): return self.array[i] else: @@ -63,7 +97,9 @@ class CSRMatrix(ConfiguredBaseModel): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - shape: Optional[int] = Field(None, description="""the shape of this sparse matrix""") + shape: Optional[int] = Field( + None, description="""the shape of this sparse matrix""" + ) indices: str = Field(..., description="""column indices""") indptr: str = Field(..., description="""index pointer""") data: str = Field(..., description="""values in the matrix""") diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py index 5c814da..0fe1a19 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/hdmf_common_table.py @@ -1,26 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -43,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, 
description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -114,7 +142,7 @@ class ElementIdentifiers(Data): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("element_id") class DynamicTableRegion(VectorData): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/namespace.py index 5718e7e..b51df28 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_0/namespace.py @@ -1,23 +1,77 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np +from .hdmf_common_sparse import ( + CSRMatrix, + CSRMatrixIndices, + CSRMatrixIndptr, + CSRMatrixData, +) + +from .hdmf_common_table import ( + Data, + Index, + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + Container, + DynamicTable, +) 
+ + metamodel_version = "None" version = "1.1.0" @@ -36,7 +90,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_nwb_language.py index 4963581..f3b68d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_nwb_language.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_nwb_language.py @@ -1,20 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from 
typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel): tree_root: bool = False -class Arraylike(ConfiguredBaseModel): - """ - Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. - """ - - linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - - # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Arraylike.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py index e7fd09a..67173a2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_sparse.py @@ -1,15 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from 
pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): from typing import Literal @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -63,7 +97,9 @@ class CSRMatrix(ConfiguredBaseModel): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - shape: Optional[int] = Field(None, description="""the shape of this sparse matrix""") + shape: Optional[int] = Field( + None, description="""the shape of this sparse matrix""" + ) indices: str = Field(..., description="""column indices""") indptr: str = Field(..., description="""index pointer""") data: str = Field(..., description="""values in the matrix""") diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py index 95f7501..2090d3b 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/hdmf_common_table.py @@ -1,26 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import 
ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -43,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -114,7 +142,7 @@ class ElementIdentifiers(Data): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) 
+ name: str = Field("element_id") class DynamicTableRegion(VectorData): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/namespace.py index 4856982..5b61cad 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_2/namespace.py @@ -1,23 +1,77 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np +from .hdmf_common_sparse import ( + CSRMatrix, + CSRMatrixIndices, + CSRMatrixIndptr, + CSRMatrixData, +) + +from .hdmf_common_table import ( + Data, + Index, + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + Container, + DynamicTable, +) + + metamodel_version = "None" version = "1.1.2" @@ -36,7 +90,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git 
a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_nwb_language.py index 4963581..f3b68d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_nwb_language.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_nwb_language.py @@ -1,20 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if 
hasattr(self, "array"): return self.array[i] else: @@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel): tree_root: bool = False -class Arraylike(ConfiguredBaseModel): - """ - Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. - """ - - linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - - # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Arraylike.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py index 6fe2e19..69f5223 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_sparse.py @@ -1,15 +1,49 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if 
sys.version_info >= (3, 8): from typing import Literal @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -63,7 +97,9 @@ class CSRMatrix(ConfiguredBaseModel): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - shape: Optional[int] = Field(None, description="""the shape of this sparse matrix""") + shape: Optional[int] = Field( + None, description="""the shape of this sparse matrix""" + ) indices: str = Field(..., description="""column indices""") indptr: str = Field(..., description="""index pointer""") data: str = Field(..., description="""values in the matrix""") diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py index 137e404..09ffc1c 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/hdmf_common_table.py @@ -1,27 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import 
ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -44,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -124,7 +151,7 @@ class ElementIdentifiers(Data): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("element_id") class DynamicTableRegion(VectorData): diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/namespace.py index 9b24a5c..82deb5f 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_1_3/namespace.py @@ -1,23 +1,77 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from 
typing_extensions import Literal if TYPE_CHECKING: import numpy as np +from .hdmf_common_sparse import ( + CSRMatrix, + CSRMatrixIndices, + CSRMatrixIndptr, + CSRMatrixData, +) + +from .hdmf_common_table import ( + Data, + Index, + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + Container, + DynamicTable, +) + + metamodel_version = "None" version = "1.1.3" @@ -36,7 +90,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py index 0ae2f08..70d5e26 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_base.py @@ -1,21 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + 
Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -38,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_nwb_language.py index 4963581..f3b68d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_nwb_language.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_nwb_language.py @@ -1,20 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal 
else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel): tree_root: bool = False -class Arraylike(ConfiguredBaseModel): - """ - Container for arraylike information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. - """ - - linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - - # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Arraylike.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py index 7871a3e..960f611 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_sparse.py @@ -1,31 +1,61 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + 
LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np from .hdmf_common_base import Container + metamodel_version = "None" version = "1.5.0" @@ -44,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py index c751f46..0524910 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/hdmf_common_table.py @@ -1,33 +1,61 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, - List, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, 
Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np from .hdmf_common_base import Container, Data + metamodel_version = "None" version = "1.5.0" @@ -46,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -115,7 +143,7 @@ class ElementIdentifiers(Data): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - name: str = Field(...) + name: str = Field("element_id") class DynamicTableRegion(VectorData): @@ -172,9 +200,9 @@ class AlignedDynamicTable(DynamicTable): """ linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) - children: Optional[List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable]] = ( - Field(default_factory=dict) - ) + children: Optional[ + List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable] + ] = Field(default_factory=dict) name: str = Field(...) 
colnames: Optional[str] = Field( None, diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/namespace.py index 0c230ba..c8e87d0 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_5_0/namespace.py @@ -1,23 +1,72 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np +from .hdmf_common_sparse import CSRMatrix + +from .hdmf_common_base import Data, Container, SimpleMultiContainer + +from .hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + + metamodel_version = "None" version = "1.5.0" @@ -36,7 +85,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git 
a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/__init__.py new file mode 100644 index 0000000..0519ecb --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py new file mode 100644 index 0000000..dde0ee4 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_base.py @@ -0,0 +1,125 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +metamodel_version = "None" +version = "1.8.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if 
hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class Data(ConfiguredBaseModel): + """ + An abstract data type for a dataset. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + + +class Container(ConfiguredBaseModel): + """ + An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + + +class SimpleMultiContainer(Container): + """ + A simple Container for holding onto multiple containers. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[List[Container] | Container] = Field(default_factory=dict) + name: str = Field(...) 
+ + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +Data.model_rebuild() +Container.model_rebuild() +SimpleMultiContainer.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_nwb_language.py new file mode 100644 index 0000000..f3b68d9 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_nwb_language.py @@ -0,0 +1,94 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +metamodel_version = "None" +version = "None" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, 
value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py new file mode 100644 index 0000000..1ae0e72 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_sparse.py @@ -0,0 +1,120 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from .hdmf_common_base import Container + + +metamodel_version = "None" +version = "1.8.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: 
slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class CSRMatrix(Container): + """ + A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + shape: Optional[int] = Field( + None, + description="""The shape (number of rows, number of columns) of this sparse matrix.""", + ) + indices: NDArray[Shape["* number of non-zero values"], int] = Field( + ..., description="""The column indices.""" + ) + indptr: NDArray[Shape["* number of rows in the matrix + 1"], int] = Field( + ..., description="""The row index pointer.""" + ) + data: NDArray[Shape["* number of non-zero values"], Any] = Field( + ..., description="""The non-zero values in the matrix.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +CSRMatrix.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py new file mode 100644 index 0000000..de599a6 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/hdmf_common_table.py @@ -0,0 +1,231 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import 
BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from .hdmf_common_base import Container, Data + + +metamodel_version = "None" +version = "1.8.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class VectorData(Data): + """ + An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. 
The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class VectorIndex(VectorData): + """ + Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by \"_index\". + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + target: Optional[str] = Field( + None, + description="""Reference to the target dataset that this index applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class ElementIdentifiers(Data): + """ + A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field("element_id") + + +class DynamicTableRegion(VectorData): + """ + DynamicTableRegion provides a link from one table to an index or region of another. 
The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + table: Optional[str] = Field( + None, + description="""Reference to the DynamicTable object that this region applies to.""", + ) + description: Optional[str] = Field( + None, description="""Description of what this table region points to.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +class DynamicTable(Container): + """ + A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. 
This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +class AlignedDynamicTable(DynamicTable): + """ + DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + children: Optional[ + List[Union[BaseModel, DynamicTable]] | Union[BaseModel, DynamicTable] + ] = Field(default_factory=dict) + name: str = Field(...) + colnames: Optional[str] = Field( + None, + description="""The names of the columns in this table. This should be used to specify an order to the columns.""", + ) + description: Optional[str] = Field( + None, description="""Description of what is in this dynamic table.""" + ) + id: NDArray[Shape["* num_rows"], int] = Field( + ..., + description="""Array of unique identifiers for the rows of this dynamic table.""", + ) + vector_data: Optional[List[str] | str] = Field( + default_factory=list, + description="""Vector columns, including index columns, of this dynamic table.""", + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +VectorData.model_rebuild() +VectorIndex.model_rebuild() +ElementIdentifiers.model_rebuild() +DynamicTableRegion.model_rebuild() +DynamicTable.model_rebuild() +AlignedDynamicTable.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/namespace.py new file mode 100644 index 0000000..36513c2 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_common/v1_8_0/namespace.py @@ -0,0 +1,108 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + 
Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from .hdmf_common_sparse import CSRMatrix + +from .hdmf_common_base import Data, Container, SimpleMultiContainer + +from .hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + + +metamodel_version = "None" +version = "1.8.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/__init__.py 
b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/__init__.py index e69de29..0519ecb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/__init__.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py index 95ee626..ed113da 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_experimental.py @@ -1,32 +1,61 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np from ...hdmf_common.v1_5_0.hdmf_common_table import VectorData + metamodel_version = "None" version = "0.1.0" @@ -45,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, 
i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_nwb_language.py index 4963581..f3b68d9 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_nwb_language.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_nwb_language.py @@ -1,20 +1,54 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy as np @@ -37,7 +71,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -56,14 +90,5 @@ class LinkML_Meta(BaseModel): tree_root: bool = False -class Arraylike(ConfiguredBaseModel): - """ - Container for arraylike 
information held in the dims, shape, and dtype properties.this is a special case to be interpreted by downstream i/o. this class has no slotsand is abstract by default.- Each slot within a subclass indicates a possible dimension.- Only dimensions that are present in all the dimension specifiers in the original schema are required.- Shape requirements are indicated using max/min cardinalities on the slot. - """ - - linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) - - # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model -Arraylike.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py index b0af83e..eef67d2 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/hdmf_experimental_resources.py @@ -1,30 +1,60 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, - ClassVar, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator from nptyping import ( Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, ) -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field - from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: import numpy 
as np -from ...hdmf_common.v1_5_0.hdmf_common_base import Container +from ...hdmf_common.v1_5_0.hdmf_common_base import Container, Data + metamodel_version = "None" version = "0.1.0" @@ -44,7 +74,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: @@ -70,27 +100,108 @@ class ExternalResources(Container): linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) name: str = Field(...) - keys: NDArray[Shape["* num_rows"], Any] = Field( + keys: str = Field( ..., description="""A table for storing user terms that are used to refer to external resources.""", ) - entities: NDArray[Shape["* num_rows"], Any] = Field( + entities: str = Field( ..., description="""A table for mapping user terms (i.e., keys) to resource entities.""", ) - resources: NDArray[Shape["* num_rows"], Any] = Field( + resources: str = Field( ..., description="""A table for mapping user terms (i.e., keys) to resource entities.""", ) - objects: NDArray[Shape["* num_rows"], Any] = Field( + objects: str = Field( ..., description="""A table for identifying which objects in a file contain references to external resources.""", ) - object_keys: NDArray[Shape["* num_rows"], Any] = Field( + object_keys: str = Field( ..., description="""A table for identifying which objects use which keys.""" ) +class ExternalResourcesKeys(Data): + """ + A table for storing user terms that are used to refer to external resources. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["keys"] = Field("keys") + key: str = Field( + ..., + description="""The user term that maps to one or more resources in the 'resources' table.""", + ) + + +class ExternalResourcesEntities(Data): + """ + A table for mapping user terms (i.e., keys) to resource entities. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["entities"] = Field("entities") + keys_idx: int = Field( + ..., description="""The index to the key in the 'keys' table.""" + ) + resources_idx: int = Field( + ..., description="""The index into the 'resources' table""" + ) + entity_id: str = Field(..., description="""The unique identifier entity.""") + entity_uri: str = Field( + ..., + description="""The URI for the entity this reference applies to. This can be an empty string.""", + ) + + +class ExternalResourcesResources(Data): + """ + A table for mapping user terms (i.e., keys) to resource entities. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["resources"] = Field("resources") + resource: str = Field(..., description="""The name of the resource.""") + resource_uri: str = Field( + ..., description="""The URI for the resource. This can be an empty string.""" + ) + + +class ExternalResourcesObjects(Data): + """ + A table for identifying which objects in a file contain references to external resources. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["objects"] = Field("objects") + object_id: str = Field(..., description="""The UUID for the object.""") + field: str = Field( + ..., + description="""The field of the object. This can be an empty string if the object is a dataset and the field is the dataset values.""", + ) + + +class ExternalResourcesObjectKeys(Data): + """ + A table for identifying which objects use which keys. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["object_keys"] = Field("object_keys") + objects_idx: int = Field( + ..., + description="""The index to the 'objects' table for the object that holds the key.""", + ) + keys_idx: int = Field( + ..., description="""The index to the 'keys' table for the key.""" + ) + + # Model rebuild # see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model ExternalResources.model_rebuild() +ExternalResourcesKeys.model_rebuild() +ExternalResourcesEntities.model_rebuild() +ExternalResourcesResources.model_rebuild() +ExternalResourcesObjects.model_rebuild() +ExternalResourcesObjectKeys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py index 2ef6106..99871cb 100644 --- a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_1_0/namespace.py @@ -1,23 +1,83 @@ from __future__ import annotations - -import sys +from datetime import datetime, date +from enum import Enum from typing import ( - TYPE_CHECKING, - Any, + Dict, Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, ) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator -from pydantic import BaseModel as BaseModel -from pydantic import ConfigDict, Field +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys if sys.version_info >= (3, 8): - pass + from typing import Literal else: - pass + from typing_extensions import Literal if TYPE_CHECKING: 
import numpy as np +from .hdmf_experimental_resources import ( + ExternalResources, + ExternalResourcesKeys, + ExternalResourcesEntities, + ExternalResourcesResources, + ExternalResourcesObjects, + ExternalResourcesObjectKeys, +) + +from ...hdmf_common.v1_5_0.hdmf_common_sparse import CSRMatrix + +from ...hdmf_common.v1_5_0.hdmf_common_base import Data, Container, SimpleMultiContainer + +from ...hdmf_common.v1_5_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + +from .hdmf_experimental_experimental import EnumData + + metamodel_version = "None" version = "0.1.0" @@ -36,7 +96,7 @@ class ConfiguredBaseModel(BaseModel): object_id: Optional[str] = Field(None, description="Unique UUID for each object") - def __getitem__(self, i: slice | int) -> np.ndarray: + def __getitem__(self, i: slice | int) -> "np.ndarray": if hasattr(self, "array"): return self.array[i] else: diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/__init__.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/__init__.py new file mode 100644 index 0000000..0519ecb --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py new file mode 100644 index 0000000..e3f105b --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_experimental.py @@ -0,0 +1,122 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field 
+from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from ...hdmf_common.v1_8_0.hdmf_common_table import VectorData + + +metamodel_version = "None" +version = "0.5.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class EnumData(VectorData): + """ + Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) 
+ elements: Optional[str] = Field( + None, + description="""Reference to the VectorData object that contains the enumerable elements""", + ) + description: Optional[str] = Field( + None, description="""Description of what these vectors represent.""" + ) + array: Optional[ + Union[ + NDArray[Shape["* dim0"], Any], + NDArray[Shape["* dim0, * dim1"], Any], + NDArray[Shape["* dim0, * dim1, * dim2"], Any], + NDArray[Shape["* dim0, * dim1, * dim2, * dim3"], Any], + ] + ] = Field(None) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +EnumData.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_nwb_language.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_nwb_language.py new file mode 100644 index 0000000..f3b68d9 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_nwb_language.py @@ -0,0 +1,94 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +metamodel_version = "None" +version = "None" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + 
arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py new file mode 100644 index 0000000..393ce36 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/hdmf_experimental_resources.py @@ -0,0 +1,232 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from 
...hdmf_common.v1_8_0.hdmf_common_base import Container, Data + + +metamodel_version = "None" +version = "0.5.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +class HERD(Container): + """ + HDMF External Resources Data Structure. A set of six tables for tracking external resource references in a file or across multiple files. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(tree_root=True), frozen=True) + name: str = Field(...) 
+ keys: str = Field( + ..., + description="""A table for storing user terms that are used to refer to external resources.""", + ) + files: str = Field( + ..., + description="""A table for storing object ids of files used in external resources.""", + ) + entities: str = Field( + ..., + description="""A table for mapping user terms (i.e., keys) to resource entities.""", + ) + objects: str = Field( + ..., + description="""A table for identifying which objects in a file contain references to external resources.""", + ) + object_keys: str = Field( + ..., description="""A table for identifying which objects use which keys.""" + ) + entity_keys: str = Field( + ..., description="""A table for identifying which keys use which entity.""" + ) + + +class HERDKeys(Data): + """ + A table for storing user terms that are used to refer to external resources. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["keys"] = Field("keys") + key: str = Field( + ..., + description="""The user term that maps to one or more resources in the `resources` table, e.g., \"human\".""", + ) + + +class HERDFiles(Data): + """ + A table for storing object ids of files used in external resources. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["files"] = Field("files") + file_object_id: str = Field( + ..., + description="""The object id (UUID) of a file that contains objects that refers to external resources.""", + ) + + +class HERDEntities(Data): + """ + A table for mapping user terms (i.e., keys) to resource entities. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["entities"] = Field("entities") + entity_id: str = Field( + ..., + description="""The compact uniform resource identifier (CURIE) of the entity, in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'.""", + ) + entity_uri: str = Field( + ..., + description="""The URI for the entity this reference applies to. This can be an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606""", + ) + + +class HERDObjects(Data): + """ + A table for identifying which objects in a file contain references to external resources. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["objects"] = Field("objects") + files_idx: int = Field( + ..., + description="""The row index to the file in the `files` table containing the object.""", + ) + object_id: str = Field(..., description="""The object id (UUID) of the object.""") + object_type: str = Field(..., description="""The data type of the object.""") + relative_path: str = Field( + ..., + description="""The relative path from the data object with the `object_id` to the dataset or attribute with the value(s) that is associated with an external resource. This can be an empty string if the object is a dataset that contains the value(s) that is associated with an external resource.""", + ) + field: str = Field( + ..., + description="""The field within the compound data type using an external resource. This is used only if the dataset or attribute is a compound data type; otherwise this should be an empty string.""", + ) + + +class HERDObjectKeys(Data): + """ + A table for identifying which objects use which keys. 
+ """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["object_keys"] = Field("object_keys") + objects_idx: int = Field( + ..., + description="""The row index to the object in the `objects` table that holds the key""", + ) + keys_idx: int = Field( + ..., description="""The row index to the key in the `keys` table.""" + ) + + +class HERDEntityKeys(Data): + """ + A table for identifying which keys use which entity. + """ + + linkml_meta: ClassVar[LinkML_Meta] = Field(LinkML_Meta(), frozen=True) + name: Literal["entity_keys"] = Field("entity_keys") + entities_idx: int = Field( + ..., description="""The row index to the entity in the `entities` table.""" + ) + keys_idx: int = Field( + ..., description="""The row index to the key in the `keys` table.""" + ) + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model +HERD.model_rebuild() +HERDKeys.model_rebuild() +HERDFiles.model_rebuild() +HERDEntities.model_rebuild() +HERDObjects.model_rebuild() +HERDObjectKeys.model_rebuild() +HERDEntityKeys.model_rebuild() diff --git a/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/namespace.py b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/namespace.py new file mode 100644 index 0000000..96b5902 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/models/pydantic/hdmf_experimental/v0_5_0/namespace.py @@ -0,0 +1,120 @@ +from __future__ import annotations +from datetime import datetime, date +from enum import Enum +from typing import ( + Dict, + Optional, + Any, + Union, + ClassVar, + Annotated, + TypeVar, + List, + TYPE_CHECKING, +) +from pydantic import BaseModel as BaseModel, Field +from pydantic import ConfigDict, BeforeValidator + +from nptyping import ( + Shape, + Float, + Float32, + Double, + Float64, + LongLong, + Int64, + Int, + Int32, + Int16, + Short, + Int8, + UInt, + UInt32, + UInt16, + UInt8, + UInt64, + Number, + String, + Unicode, + Unicode, + Unicode, + 
String, + Bool, + Datetime64, +) +from nwb_linkml.types import NDArray +import sys + +if sys.version_info >= (3, 8): + from typing import Literal +else: + from typing_extensions import Literal +if TYPE_CHECKING: + import numpy as np + + +from .hdmf_experimental_resources import ( + HERD, + HERDKeys, + HERDFiles, + HERDEntities, + HERDObjects, + HERDObjectKeys, + HERDEntityKeys, +) + +from ...hdmf_common.v1_8_0.hdmf_common_sparse import CSRMatrix + +from ...hdmf_common.v1_8_0.hdmf_common_base import Data, Container, SimpleMultiContainer + +from ...hdmf_common.v1_8_0.hdmf_common_table import ( + VectorData, + VectorIndex, + ElementIdentifiers, + DynamicTableRegion, + DynamicTable, + AlignedDynamicTable, +) + +from .hdmf_experimental_experimental import EnumData + + +metamodel_version = "None" +version = "0.5.0" + + +class ConfiguredBaseModel(BaseModel): + model_config = ConfigDict( + validate_assignment=True, + validate_default=True, + extra="allow", + arbitrary_types_allowed=True, + use_enum_values=True, + ) + hdf5_path: Optional[str] = Field( + None, description="The absolute path that this object is stored in an NWB file" + ) + + object_id: Optional[str] = Field(None, description="Unique UUID for each object") + + def __getitem__(self, i: slice | int) -> "np.ndarray": + if hasattr(self, "array"): + return self.array[i] + else: + return super().__getitem__(i) + + def __setitem__(self, i: slice | int, value: Any): + if hasattr(self, "array"): + self.array[i] = value + else: + super().__setitem__(i, value) + + +class LinkML_Meta(BaseModel): + """Extra LinkML Metadata stored as a class attribute""" + + tree_root: bool = False + + +# Model rebuild +# see https://pydantic-docs.helpmanual.io/usage/models/#rebuilding-a-model diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml new file mode 100644 index 0000000..3db31ea --- /dev/null +++ 
b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.base.yaml @@ -0,0 +1,407 @@ +name: core.nwb.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: core +id: core.nwb.base +version: 2.7.0 +imports: +- ../../hdmf_common/v1_8_0/namespace +- ../../hdmf_common/v1_8_0/namespace +- core.nwb.language +default_prefix: core.nwb.base/ +classes: + NWBData: + name: NWBData + description: An abstract data type for a dataset. + is_a: Data + attributes: + name: + name: name + identifier: true + range: string + required: true + tree_root: true + TimeSeriesReferenceVectorData: + name: TimeSeriesReferenceVectorData + description: Column storing references to a TimeSeries (rows). For each TimeSeries + this VectorData column stores the start_index and count to indicate the range + in time to be selected as well as an object reference to the TimeSeries. + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(timeseries) + identifier: true + range: string + required: true + idx_start: + name: idx_start + description: Start index into the TimeSeries 'data' and 'timestamp' datasets + of the referenced TimeSeries. The first dimension of those arrays is always + time. + multivalued: false + range: int32 + required: true + count: + name: count + description: Number of data samples available in this time series, during + this epoch + multivalued: false + range: int32 + required: true + timeseries: + name: timeseries + description: The TimeSeries that this index applies to + multivalued: false + range: TimeSeries + required: true + tree_root: true + Image: + name: Image + description: An abstract data type for an image. Shape can be 2-D (x, y), or 3-D + where the third dimension can have three or four elements, e.g. (x, y, (r, g, + b)) or (x, y, (r, g, b, a)). 
+ is_a: NWBData + attributes: + name: + name: name + identifier: true + range: string + required: true + resolution: + name: resolution + description: Pixel resolution of the image, in pixels per centimeter. + range: float32 + description: + name: description + description: Description of the image. + range: text + array: + name: array + range: numeric + any_of: + - array: + dimensions: + - alias: x + - alias: y + - array: + dimensions: + - alias: x + - alias: y + - alias: r, g, b + exact_cardinality: 3 + - array: + dimensions: + - alias: x + - alias: y + - alias: r, g, b, a + exact_cardinality: 4 + tree_root: true + ImageReferences: + name: ImageReferences + description: Ordered dataset of references to Image objects. + is_a: NWBData + attributes: + name: + name: name + identifier: true + range: string + required: true + image: + name: image + description: Ordered dataset of references to Image objects. + multivalued: true + range: Image + required: true + tree_root: true + NWBContainer: + name: NWBContainer + description: An abstract data type for a generic container storing collections + of data and metadata. Base type for all data and metadata containers. + is_a: Container + attributes: + name: + name: name + identifier: true + range: string + required: true + tree_root: true + NWBDataInterface: + name: NWBDataInterface + description: An abstract data type for a generic container storing collections + of data, as opposed to metadata. + is_a: NWBContainer + attributes: + name: + name: name + identifier: true + range: string + required: true + tree_root: true + TimeSeries: + name: TimeSeries + description: General purpose time series. + is_a: NWBDataInterface + attributes: + name: + name: name + identifier: true + range: string + required: true + description: + name: description + description: Description of the time series. + range: text + comments: + name: comments + description: Human-readable comments about the TimeSeries. 
This second descriptive + field can be used to store additional information, or descriptive information + if the primary description field is populated with a computer-readable string. + range: text + data: + name: data + description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first + dimension should always represent time. This can also be used to store binary + data (e.g., image frames). This can also be a link to data stored in an + external file. + multivalued: false + range: TimeSeries__data + required: true + starting_time: + name: starting_time + description: Timestamp of the first sample in seconds. When timestamps are + uniformly spaced, the timestamp of the first sample can be specified and + all subsequent ones calculated from the sampling rate attribute. + multivalued: false + range: TimeSeries__starting_time + required: false + timestamps: + name: timestamps + description: Timestamps for samples stored in data, in seconds, relative to + the common experiment master-clock stored in NWBFile.timestamps_reference_time. + multivalued: false + array: + dimensions: + - alias: num_times + range: float64 + required: false + control: + name: control + description: Numerical labels that apply to each time point in data for the + purpose of querying and slicing data by these values. If present, the length + of this array should be the same size as the first dimension of data. + multivalued: false + array: + dimensions: + - alias: num_times + range: uint8 + required: false + control_description: + name: control_description + description: Description of each control value. Must be present if control + is present. If present, control_description[0] should describe time points + where control == 0. 
+ multivalued: false + array: + dimensions: + - alias: num_control_values + range: text + required: false + sync: + name: sync + description: Lab-specific time and sync information as provided directly from + hardware devices and that is necessary for aligning all acquired time information + to a common timebase. The timestamp array stores time in the common timebase. + This group will usually only be populated in TimeSeries that are stored + external to the NWB file, in files storing raw data. Once timestamp data + is calculated, the contents of 'sync' are mostly for archival purposes. + multivalued: false + range: TimeSeries__sync + required: false + tree_root: true + TimeSeries__data: + name: TimeSeries__data + description: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension + should always represent time. This can also be used to store binary data (e.g., + image frames). This can also be a link to data stored in an external file. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + conversion: + name: conversion + description: Scalar to multiply each element in data to convert it to the + specified 'unit'. If the data are stored in acquisition system units or + other units that require a conversion to be interpretable, multiply the + data by 'conversion' to convert the data to the specified 'unit'. e.g. if + the data acquisition system stores values in this object as signed 16-bit + integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V + to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' + multiplier to get from raw data acquisition values to recorded volts is + 2.5/32768/8000 = 9.5367e-9. + range: float32 + offset: + name: offset + description: Scalar to add to the data after scaling by 'conversion' to finalize + its coercion to the specified 'unit'. 
Two common examples of this include + (a) data stored in an unsigned type that requires a shift after scaling + to re-center the data, and (b) specialized recording devices that naturally + cause a scalar offset with respect to the true units. + range: float32 + resolution: + name: resolution + description: Smallest meaningful difference between values in data, stored + in the specified by unit, e.g., the change in value of the least significant + bit, or a larger number if signal noise is known to be present. If unknown, + use -1.0. + range: float32 + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion' and add 'offset'. + range: text + continuity: + name: continuity + description: Optionally describe the continuity of the data. Can be "continuous", + "instantaneous", or "step". For example, a voltage trace would be "continuous", + because samples are recorded from a continuous process. An array of lick + times would be "instantaneous", because the data represents distinct moments + in time. Times of image presentations would be "step" because the picture + remains the same until the next timepoint. This field is optional, but is + useful in providing information about the underlying data. It may inform + the way this data is interpreted, the way it is visualized, and what analysis + methods are applicable. + range: text + array: + name: array + range: AnyType + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_DIM2 + - array: + dimensions: + - alias: num_times + - alias: num_DIM2 + - alias: num_DIM3 + - array: + dimensions: + - alias: num_times + - alias: num_DIM2 + - alias: num_DIM3 + - alias: num_DIM4 + TimeSeries__starting_time: + name: TimeSeries__starting_time + description: Timestamp of the first sample in seconds. 
When timestamps are uniformly + spaced, the timestamp of the first sample can be specified and all subsequent + ones calculated from the sampling rate attribute. + attributes: + name: + name: name + ifabsent: string(starting_time) + identifier: true + range: string + required: true + equals_string: starting_time + rate: + name: rate + description: Sampling rate, in Hz. + range: float32 + unit: + name: unit + description: Unit of measurement for time, which is fixed to 'seconds'. + range: text + value: + name: value + range: float64 + required: true + TimeSeries__sync: + name: TimeSeries__sync + description: Lab-specific time and sync information as provided directly from + hardware devices and that is necessary for aligning all acquired time information + to a common timebase. The timestamp array stores time in the common timebase. + This group will usually only be populated in TimeSeries that are stored external + to the NWB file, in files storing raw data. Once timestamp data is calculated, + the contents of 'sync' are mostly for archival purposes. + attributes: + name: + name: name + ifabsent: string(sync) + identifier: true + range: string + required: true + equals_string: sync + ProcessingModule: + name: ProcessingModule + description: A collection of processed data. + is_a: NWBContainer + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: NWBDataInterface + - range: DynamicTable + tree_root: true + Images: + name: Images + description: A collection of images with an optional way to specify the order + of the images using the "order_of_images" dataset. An order must be specified + if the images are referenced by index, e.g., from an IndexSeries. + is_a: NWBDataInterface + attributes: + name: + name: name + ifabsent: string(Images) + identifier: true + range: string + required: true + description: + name: description + description: Description of this collection of images. 
+ range: text + image: + name: image + description: Images stored in this collection. + multivalued: true + range: Image + required: true + order_of_images: + name: order_of_images + description: Ordered dataset of references to Image objects stored in the + parent group. Each Image object in the Images group should be stored once + and only once, so the dataset should have the same length as the number + of images. + multivalued: false + range: Images__order_of_images + required: false + tree_root: true + Images__order_of_images: + name: Images__order_of_images + description: Ordered dataset of references to Image objects stored in the parent + group. Each Image object in the Images group should be stored once and only + once, so the dataset should have the same length as the number of images. + is_a: ImageReferences + attributes: + name: + name: name + ifabsent: string(order_of_images) + identifier: true + range: string + required: true + equals_string: order_of_images diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml new file mode 100644 index 0000000..07d2e26 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.behavior.yaml @@ -0,0 +1,197 @@ +name: core.nwb.behavior +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: core +id: core.nwb.behavior +version: 2.7.0 +imports: +- core.nwb.base +- core.nwb.misc +- core.nwb.language +default_prefix: core.nwb.behavior/ +classes: + SpatialSeries: + name: SpatialSeries + description: 'Direction, e.g., of gaze or travel, or position. The TimeSeries::data + field is a 2D array storing position or direction relative to some reference + frame. Array structure: [num measurements] [num dimensions]. Each SpatialSeries + has a text dataset reference_frame that indicates the zero-position, or the + zero-axes for direction. 
For example, if representing gaze direction, ''straight-ahead'' + might be a specific pixel on the monitor, or some other point in space. For + position data, the 0,0 point might be the top-left corner of an enclosure, as + viewed from the tracking camera. The unit of data will indicate how to interpret + SpatialSeries values.' + is_a: TimeSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + data: + name: data + description: 1-D or 2-D array storing position or direction relative to some + reference frame. + multivalued: false + range: SpatialSeries__data + required: true + reference_frame: + name: reference_frame + description: Description defining what exactly 'straight-ahead' means. + multivalued: false + range: text + required: false + tree_root: true + SpatialSeries__data: + name: SpatialSeries__data + description: 1-D or 2-D array storing position or direction relative to some reference + frame. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + unit: + name: unit + description: Base unit of measurement for working with the data. The default + value is 'meters'. Actual stored values are not necessarily stored in these + units. To access the data in these units, multiply 'data' by 'conversion' + and add 'offset'. + range: text + array: + name: array + range: numeric + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: x + exact_cardinality: 1 + - array: + dimensions: + - alias: num_times + - alias: x,y + exact_cardinality: 2 + - array: + dimensions: + - alias: num_times + - alias: x,y,z + exact_cardinality: 3 + BehavioralEpochs: + name: BehavioralEpochs + description: TimeSeries for storing behavioral epochs. The objective of this + and the other two Behavioral interfaces (e.g. 
BehavioralEvents and BehavioralTimeSeries) + is to provide generic hooks for software tools/scripts. This allows a tool/script + to take the output one specific interface (e.g., UnitTimes) and plot that data + relative to another data modality (e.g., behavioral events) without having to + define all possible modalities in advance. Declaring one of these interfaces + means that one or more TimeSeries of the specified type is published. These + TimeSeries should reside in a group having the same name as the interface. For + example, if a BehavioralTimeSeries interface is declared, the module will have + one or more TimeSeries defined in the module sub-group 'BehavioralTimeSeries'. + BehavioralEpochs should use IntervalSeries. BehavioralEvents is used for irregular + events. BehavioralTimeSeries is for continuous data. + is_a: NWBDataInterface + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: IntervalSeries + tree_root: true + BehavioralEvents: + name: BehavioralEvents + description: TimeSeries for storing behavioral events. See description of BehavioralEpochs + for more details. + is_a: NWBDataInterface + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries + tree_root: true + BehavioralTimeSeries: + name: BehavioralTimeSeries + description: TimeSeries for storing Behavoioral time series data. See description + of BehavioralEpochs for more details. + is_a: NWBDataInterface + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries + tree_root: true + PupilTracking: + name: PupilTracking + description: Eye-tracking data, representing pupil size. 
+ is_a: NWBDataInterface + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries + tree_root: true + EyeTracking: + name: EyeTracking + description: Eye-tracking data, representing direction of gaze. + is_a: NWBDataInterface + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries + tree_root: true + CompassDirection: + name: CompassDirection + description: With a CompassDirection interface, a module publishes a SpatialSeries + object representing a floating point value for theta. The SpatialSeries::reference_frame + field should indicate what direction corresponds to 0 and which is the direction + of rotation (this should be clockwise). The si_unit for the SpatialSeries should + be radians or degrees. + is_a: NWBDataInterface + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries + tree_root: true + Position: + name: Position + description: Position data, whether along the x, x/y or x/y/z axis. 
+ is_a: NWBDataInterface + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpatialSeries + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.device.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.device.yaml new file mode 100644 index 0000000..f668388 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.device.yaml @@ -0,0 +1,36 @@ +name: core.nwb.device +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: core +id: core.nwb.device +version: 2.7.0 +imports: +- core.nwb.base +- core.nwb.language +default_prefix: core.nwb.device/ +classes: + Device: + name: Device + description: Metadata about a data acquisition device, e.g., recording system, + electrode, microscope. + is_a: NWBContainer + attributes: + name: + name: name + identifier: true + range: string + required: true + description: + name: description + description: Description of the device (e.g., model, firmware version, processing + software version, etc.) as free-form text. + range: text + manufacturer: + name: manufacturer + description: The name of the manufacturer of the device. 
+ range: text + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml new file mode 100644 index 0000000..e8e4242 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ecephys.yaml @@ -0,0 +1,451 @@ +name: core.nwb.ecephys +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: core +id: core.nwb.ecephys +version: 2.7.0 +imports: +- core.nwb.base +- ../../hdmf_common/v1_8_0/namespace +- core.nwb.device +- core.nwb.language +default_prefix: core.nwb.ecephys/ +classes: + ElectricalSeries: + name: ElectricalSeries + description: A time series of acquired voltage data from extracellular recordings. + The data field is an int or float array storing data in volts. The first dimension + should always represent time. The second dimension, if present, should represent + channels. + is_a: TimeSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + filtering: + name: filtering + description: Filtering applied to all channels of the data. For example, if + this ElectricalSeries represents high-pass-filtered data (also known as + AP Band), then this value could be "High-pass 4-pole Bessel filter at 500 + Hz". If this ElectricalSeries represents low-pass-filtered LFP data and + the type of filter is unknown, then this value could be "Low-pass filter + at 300 Hz". If a non-standard filter type is used, provide as much detail + about the filter properties as possible. + range: text + data: + name: data + description: Recorded voltage data. 
+ multivalued: false + range: numeric + required: true + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_channels + - array: + dimensions: + - alias: num_times + - alias: num_channels + - alias: num_samples + electrodes: + name: electrodes + description: DynamicTableRegion pointer to the electrodes that this time series + was generated from. + multivalued: false + range: ElectricalSeries__electrodes + required: true + channel_conversion: + name: channel_conversion + description: Channel-specific conversion factor. Multiply the data in the + 'data' dataset by these values along the channel axis (as indicated by axis + attribute) AND by the global conversion factor in the 'conversion' attribute + of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion + * channel_conversion. This approach allows for both global and per-channel + data conversion factors needed to support the storage of electrical recordings + as native values generated by data acquisition systems. If this dataset + is not present, then there is no channel-specific conversion factor, i.e. + it is 1 for all channels. + multivalued: false + array: + dimensions: + - alias: num_channels + range: float32 + required: false + tree_root: true + ElectricalSeries__electrodes: + name: ElectricalSeries__electrodes + description: DynamicTableRegion pointer to the electrodes that this time series + was generated from. + is_a: DynamicTableRegion + attributes: + name: + name: name + ifabsent: string(electrodes) + identifier: true + range: string + required: true + equals_string: electrodes + SpikeEventSeries: + name: SpikeEventSeries + description: 'Stores snapshots/snippets of recorded spike events (i.e., threshold + crossings). This may also be raw data, as reported by ephys hardware. If so, + the TimeSeries::description field should describe how events were detected. 
+ All SpikeEventSeries should reside in a module (under EventWaveform interface) + even if the spikes were reported and stored by hardware. All events span the + same recording channels and store snapshots of equal duration. TimeSeries::data + array structure: [num events] [num channels] [num samples] (or [num events] + [num samples] for single electrode).' + is_a: ElectricalSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + data: + name: data + description: Spike waveforms. + multivalued: false + range: numeric + required: true + any_of: + - array: + dimensions: + - alias: num_events + - alias: num_samples + - array: + dimensions: + - alias: num_events + - alias: num_channels + - alias: num_samples + timestamps: + name: timestamps + description: Timestamps for samples stored in data, in seconds, relative to + the common experiment master-clock stored in NWBFile.timestamps_reference_time. + Timestamps are required for the events. Unlike for TimeSeries, timestamps + are required for SpikeEventSeries and are thus re-specified here. + multivalued: false + array: + dimensions: + - alias: num_times + range: float64 + required: true + tree_root: true + FeatureExtraction: + name: FeatureExtraction + description: Features, such as PC1 and PC2, that are extracted from signals stored + in a SpikeEventSeries or other source. + is_a: NWBDataInterface + attributes: + name: + name: name + ifabsent: string(FeatureExtraction) + identifier: true + range: string + required: true + description: + name: description + description: Description of features (eg, ''PC1'') for each of the extracted + features. + multivalued: false + array: + dimensions: + - alias: num_features + range: text + required: true + features: + name: features + description: Multi-dimensional array of features extracted from each event. 
+ multivalued: false + array: + dimensions: + - alias: num_events + - alias: num_channels + - alias: num_features + range: float32 + required: true + times: + name: times + description: Times of events that features correspond to (can be a link). + multivalued: false + array: + dimensions: + - alias: num_events + range: float64 + required: true + electrodes: + name: electrodes + description: DynamicTableRegion pointer to the electrodes that this time series + was generated from. + multivalued: false + range: FeatureExtraction__electrodes + required: true + tree_root: true + FeatureExtraction__electrodes: + name: FeatureExtraction__electrodes + description: DynamicTableRegion pointer to the electrodes that this time series + was generated from. + is_a: DynamicTableRegion + attributes: + name: + name: name + ifabsent: string(electrodes) + identifier: true + range: string + required: true + equals_string: electrodes + EventDetection: + name: EventDetection + description: Detected spike events from voltage trace(s). + is_a: NWBDataInterface + attributes: + name: + name: name + ifabsent: string(EventDetection) + identifier: true + range: string + required: true + detection_method: + name: detection_method + description: Description of how events were detected, such as voltage threshold, + or dV/dT threshold, as well as relevant values. + multivalued: false + range: text + required: true + source_idx: + name: source_idx + description: Indices (zero-based) into source ElectricalSeries::data array + corresponding to time of event. ''description'' should define what is meant + by time of event (e.g., .25 ms before action potential peak, zero-crossing + time, etc). The index points to each event from the raw data. + multivalued: false + array: + dimensions: + - alias: num_events + range: int32 + required: true + times: + name: times + description: Timestamps of events, in seconds. 
+ multivalued: false + array: + dimensions: + - alias: num_events + range: float64 + required: true + tree_root: true + EventWaveform: + name: EventWaveform + description: Represents either the waveforms of detected events, as extracted + from a raw data trace in /acquisition, or the event waveforms that were stored + during experiment acquisition. + is_a: NWBDataInterface + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: SpikeEventSeries + tree_root: true + FilteredEphys: + name: FilteredEphys + description: Electrophysiology data from one or more channels that has been subjected + to filtering. Examples of filtered data include Theta and Gamma (LFP has its + own interface). FilteredEphys modules publish an ElectricalSeries for each filtered + channel or set of channels. The name of each ElectricalSeries is arbitrary but + should be informative. The source of the filtered data, whether this is from + analysis of another time series or as acquired by hardware, should be noted + in each's TimeSeries::description field. There is no assumed 1::1 correspondence + between filtered ephys signals and electrodes, as a single signal can apply + to many nearby electrodes, and one electrode may have different filtered (e.g., + theta and/or gamma) signals represented. Filter properties should be noted in + the ElectricalSeries 'filtering' attribute. + is_a: NWBDataInterface + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ElectricalSeries + tree_root: true + LFP: + name: LFP + description: LFP data from one or more channels. The electrode map in each published + ElectricalSeries will identify which channels are providing LFP data. Filter + properties should be noted in the ElectricalSeries 'filtering' attribute. 
+ is_a: NWBDataInterface + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ElectricalSeries + tree_root: true + ElectrodeGroup: + name: ElectrodeGroup + description: A physical grouping of electrodes, e.g. a shank of an array. + is_a: NWBContainer + attributes: + name: + name: name + identifier: true + range: string + required: true + description: + name: description + description: Description of this electrode group. + range: text + location: + name: location + description: Location of electrode group. Specify the area, layer, comments + on estimation of area/layer, etc. Use standard atlas names for anatomical + regions when possible. + range: text + position: + name: position + description: stereotaxic or common framework coordinates + multivalued: false + range: ElectrodeGroup__position + required: false + tree_root: true + ElectrodeGroup__position: + name: ElectrodeGroup__position + description: stereotaxic or common framework coordinates + attributes: + name: + name: name + ifabsent: string(position) + identifier: true + range: string + required: true + equals_string: position + x: + name: x + description: x coordinate + multivalued: false + range: float32 + required: false + y: + name: y + description: y coordinate + multivalued: false + range: float32 + required: false + z: + name: z + description: z coordinate + multivalued: false + range: float32 + required: false + ClusterWaveforms: + name: ClusterWaveforms + description: DEPRECATED The mean waveform shape, including standard deviation, + of the different clusters. Ideally, the waveform analysis should be performed + on data that is only high-pass filtered. This is a separate module because it + is expected to require updating. For example, IMEC probes may require different + storage requirements to store/display mean waveforms, requiring a new interface + or an extension of this one. 
+ is_a: NWBDataInterface + attributes: + name: + name: name + ifabsent: string(ClusterWaveforms) + identifier: true + range: string + required: true + waveform_filtering: + name: waveform_filtering + description: Filtering applied to data before generating mean/sd + multivalued: false + range: text + required: true + waveform_mean: + name: waveform_mean + description: The mean waveform for each cluster, using the same indices for + each wave as cluster numbers in the associated Clustering module (i.e, cluster + 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence + should be empty (e.g., zero- filled) + multivalued: false + array: + dimensions: + - alias: num_clusters + - alias: num_samples + range: float32 + required: true + waveform_sd: + name: waveform_sd + description: Stdev of waveforms for each cluster, using the same indices as + in mean + multivalued: false + array: + dimensions: + - alias: num_clusters + - alias: num_samples + range: float32 + required: true + tree_root: true + Clustering: + name: Clustering + description: DEPRECATED Clustered spike data, whether from automatic clustering + tools (e.g., klustakwik) or as a result of manual sorting. + is_a: NWBDataInterface + attributes: + name: + name: name + ifabsent: string(Clustering) + identifier: true + range: string + required: true + description: + name: description + description: Description of clusters or clustering, (e.g. cluster 0 is noise, + clusters curated using Klusters, etc) + multivalued: false + range: text + required: true + num: + name: num + description: Cluster number of each event + multivalued: false + array: + dimensions: + - alias: num_events + range: int32 + required: true + peak_over_rms: + name: peak_over_rms + description: Maximum ratio of waveform peak to RMS on any channel in the cluster + (provides a basic clustering metric). 
+ multivalued: false + array: + dimensions: + - alias: num_clusters + range: float32 + required: true + times: + name: times + description: Times of clustered events, in seconds. This may be a link to + times field in associated FeatureExtraction module. + multivalued: false + array: + dimensions: + - alias: num_events + range: float64 + required: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml new file mode 100644 index 0000000..84a2c50 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.epoch.yaml @@ -0,0 +1,97 @@ +name: core.nwb.epoch +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: core +id: core.nwb.epoch +version: 2.7.0 +imports: +- ../../hdmf_common/v1_8_0/namespace +- core.nwb.base +- core.nwb.language +default_prefix: core.nwb.epoch/ +classes: + TimeIntervals: + name: TimeIntervals + description: A container for aggregating epoch data and the TimeSeries that each + epoch applies to. + is_a: DynamicTable + attributes: + name: + name: name + identifier: true + range: string + required: true + start_time: + name: start_time + description: Start time of epoch, in seconds. + multivalued: true + range: float32 + stop_time: + name: stop_time + description: Stop time of epoch, in seconds. + multivalued: true + range: float32 + tags: + name: tags + description: User-defined tags that identify or categorize events. + multivalued: true + range: text + tags_index: + name: tags_index + description: Index for tags. + multivalued: false + range: TimeIntervals__tags_index + required: false + timeseries: + name: timeseries + description: An index into a TimeSeries object. + multivalued: false + range: TimeIntervals__timeseries + required: false + timeseries_index: + name: timeseries_index + description: Index for timeseries. 
+ multivalued: false + range: TimeIntervals__timeseries_index + required: false + tree_root: true + TimeIntervals__tags_index: + name: TimeIntervals__tags_index + description: Index for tags. + is_a: VectorIndex + attributes: + name: + name: name + ifabsent: string(tags_index) + identifier: true + range: string + required: true + equals_string: tags_index + TimeIntervals__timeseries: + name: TimeIntervals__timeseries + description: An index into a TimeSeries object. + is_a: TimeSeriesReferenceVectorData + attributes: + name: + name: name + ifabsent: string(timeseries) + identifier: true + range: string + required: true + equals_string: timeseries + TimeIntervals__timeseries_index: + name: TimeIntervals__timeseries_index + description: Index for timeseries. + is_a: VectorIndex + attributes: + name: + name: name + ifabsent: string(timeseries_index) + identifier: true + range: string + required: true + equals_string: timeseries_index diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml new file mode 100644 index 0000000..037783a --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.file.yaml @@ -0,0 +1,759 @@ +name: core.nwb.file +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: core +id: core.nwb.file +version: 2.7.0 +imports: +- core.nwb.base +- ../../hdmf_common/v1_8_0/namespace +- core.nwb.device +- core.nwb.ecephys +- core.nwb.icephys +- core.nwb.ogen +- core.nwb.ophys +- core.nwb.epoch +- core.nwb.misc +- core.nwb.language +default_prefix: core.nwb.file/ +classes: + ScratchData: + name: ScratchData + description: Any one-off datasets + is_a: NWBData + attributes: + name: + name: name + identifier: true + range: string + required: true + notes: + name: notes + description: Any notes the user has about the dataset being stored + range: text + tree_root: true + NWBFile: + name: 
NWBFile + description: An NWB file storing cellular-based neurophysiology data from a single + experimental session. + is_a: NWBContainer + attributes: + name: + name: name + ifabsent: string(root) + identifier: true + range: string + required: true + equals_string: root + nwb_version: + name: nwb_version + description: File version string. Use semantic versioning, e.g. 1.2.1. This + will be the name of the format with trailing major, minor and patch numbers. + range: text + file_create_date: + name: file_create_date + description: 'A record of the date the file was created and of subsequent + modifications. The date is stored in UTC with local timezone offset as ISO + 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored + in UTC end in "Z" with no timezone offset. Date accuracy is up to milliseconds. + The file can be created after the experiment was run, so this may differ + from the experiment start time. Each modification to the nwb file adds a + new entry to the array.' + multivalued: false + array: + dimensions: + - alias: num_modifications + range: isodatetime + required: true + identifier: + name: identifier + description: A unique text identifier for the file. For example, concatenated + lab name, file creation date/time and experimentalist, or a hash of these + and/or other values. The goal is that the string should be unique to all + other files. + multivalued: false + range: text + required: true + session_description: + name: session_description + description: A description of the experimental session and data in the file. + multivalued: false + range: text + required: true + session_start_time: + name: session_start_time + description: 'Date and time of the experiment/session start. The date is stored + in UTC with local timezone offset as ISO 8601 extended formatted string: + 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with no timezone + offset. Date accuracy is up to milliseconds.' 
+ multivalued: false + range: isodatetime + required: true + timestamps_reference_time: + name: timestamps_reference_time + description: 'Date and time corresponding to time zero of all timestamps. + The date is stored in UTC with local timezone offset as ISO 8601 extended + formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end + in "Z" with no timezone offset. Date accuracy is up to milliseconds. All + times stored in the file use this time as reference (i.e., time zero).' + multivalued: false + range: isodatetime + required: true + acquisition: + name: acquisition + description: Data streams recorded from the system, including ephys, ophys, + tracking, etc. This group should be read-only after the experiment is completed + and timestamps are corrected to a common timebase. The data stored here + may be links to raw data stored in external NWB files. This will allow keeping + bulky raw data out of the file while preserving the option of keeping some/all + in the file. Acquired data includes tracking and experimental data streams + (i.e., everything measured from the system). If bulky data is stored in + the /acquisition group, the data can exist in a separate NWB file that is + linked to by the file being used for processing and analysis. + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: NWBDataInterface + - range: DynamicTable + analysis: + name: analysis + description: Lab-specific and custom scientific analysis of data. There is + no defined format for the content of this group - the format is up to the + individual user/lab. To facilitate sharing analysis data between labs, the + contents here should be stored in standard types (e.g., neurodata_types) + and appropriately documented. The file can store lab-specific and custom + data analysis without restriction on its form or schema, reducing data formatting + restrictions on end users. Such data should be placed in the analysis group. 
+ The analysis data should be documented so that it could be shared with other + labs. + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: NWBContainer + - range: DynamicTable + scratch: + name: scratch + description: A place to store one-off analysis results. Data placed here is + not intended for sharing. By placing data here, users acknowledge that there + is no guarantee that their data meets any standard. + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: NWBContainer + - range: DynamicTable + processing: + name: processing + description: The home for ProcessingModules. These modules perform intermediate + analysis of data that is necessary to perform before scientific analysis. + Examples include spike clustering, extracting position from tracking data, + stitching together image slices. ProcessingModules can be large and express + many data sets from relatively complex analysis (e.g., spike detection and + clustering) or small, representing extraction of position information from + tracking video, or even binary lick/no-lick decisions. Common software tools + (e.g., klustakwik, MClust) are expected to read/write data here. 'Processing' + refers to intermediate analysis of the acquired data to make it more amenable + to scientific analysis. + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ProcessingModule + stimulus: + name: stimulus + description: Data pushed into the system (eg, video stimulus, sound, voltage, + etc) and secondary representations of that data (eg, measurements of something + used as a stimulus). This group should be made read-only after experiment + complete and timestamps are corrected to common timebase. Stores both presented + stimuli and stimulus templates, the latter in case the same stimulus is + presented multiple times, or is pulled from an external stimulus library. 
+ Stimuli are here defined as any signal that is pushed into the system as + part of the experiment (eg, sound, video, voltage, etc). Many different + experiments can use the same stimuli, and stimuli can be re-used during + an experiment. The stimulus group is organized so that one version of template + stimuli can be stored and these be used multiple times. These templates + can exist in the present file or can be linked to a remote library file. + multivalued: false + range: NWBFile__stimulus + required: true + general: + name: general + description: Experimental metadata, including protocol, notes and description + of hardware device(s). The metadata stored in this section should be used + to describe the experiment. Metadata necessary for interpreting the data + is stored with the data. General experimental metadata, including animal + strain, experimental protocols, experimenter, devices, etc, are stored under + 'general'. Core metadata (e.g., that required to interpret data fields) + is stored with the data itself, and implicitly defined by the file specification + (e.g., time is in seconds). The strategy used here for storing non-core + metadata is to use free-form text fields, such as would appear in sentences + or paragraphs from a Methods section. Metadata fields are text to enable + them to be more general, for example to represent ranges instead of numerical + values. Machine-readable metadata is stored as attributes to these free-form + datasets. All entries in the below table are to be included when data is + present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) + should not be created unless there is data to store within them. 
+ multivalued: false + range: NWBFile__general + required: true + intervals: + name: intervals + description: Experimental intervals, whether that be logically distinct sub-experiments + having a particular scientific goal, trials (see trials subgroup) during + an experiment, or epochs (see epochs subgroup) deriving from analysis of + data. + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeIntervals + - range: TimeIntervals + - range: TimeIntervals + - range: TimeIntervals + units: + name: units + description: Data about sorted spike units. + multivalued: false + range: Units + required: false + tree_root: true + NWBFile__stimulus: + name: NWBFile__stimulus + description: Data pushed into the system (eg, video stimulus, sound, voltage, + etc) and secondary representations of that data (eg, measurements of something + used as a stimulus). This group should be made read-only after experiment complete + and timestamps are corrected to common timebase. Stores both presented stimuli + and stimulus templates, the latter in case the same stimulus is presented multiple + times, or is pulled from an external stimulus library. Stimuli are here defined + as any signal that is pushed into the system as part of the experiment (eg, + sound, video, voltage, etc). Many different experiments can use the same stimuli, + and stimuli can be re-used during an experiment. The stimulus group is organized + so that one version of template stimuli can be stored and these be used multiple + times. These templates can exist in the present file or can be linked to a remote + library file. + attributes: + name: + name: name + ifabsent: string(stimulus) + identifier: true + range: string + required: true + equals_string: stimulus + presentation: + name: presentation + description: Stimuli presented during the experiment. 
+ multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries + - range: NWBDataInterface + - range: DynamicTable + templates: + name: templates + description: Template stimuli. Timestamps in templates are based on stimulus + design and are relative to the beginning of the stimulus. When templates + are used, the stimulus instances must convert presentation times to the + experiment`s time reference frame. + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: TimeSeries + - range: Images + NWBFile__general: + name: NWBFile__general + description: Experimental metadata, including protocol, notes and description + of hardware device(s). The metadata stored in this section should be used to + describe the experiment. Metadata necessary for interpreting the data is stored + with the data. General experimental metadata, including animal strain, experimental + protocols, experimenter, devices, etc, are stored under 'general'. Core metadata + (e.g., that required to interpret data fields) is stored with the data itself, + and implicitly defined by the file specification (e.g., time is in seconds). + The strategy used here for storing non-core metadata is to use free-form text + fields, such as would appear in sentences or paragraphs from a Methods section. + Metadata fields are text to enable them to be more general, for example to represent + ranges instead of numerical values. Machine-readable metadata is stored as attributes + to these free-form datasets. All entries in the below table are to be included + when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology + experiment) should not be created unless there is data to store within them. + attributes: + name: + name: name + ifabsent: string(general) + identifier: true + range: string + required: true + equals_string: general + data_collection: + name: data_collection + description: Notes about data collection and analysis. 
+ multivalued: false + range: text + required: false + experiment_description: + name: experiment_description + description: General description of the experiment. + multivalued: false + range: text + required: false + experimenter: + name: experimenter + description: Name of person(s) who performed the experiment. Can also specify + roles of different people involved. + multivalued: false + array: + dimensions: + - alias: num_experimenters + range: text + required: false + institution: + name: institution + description: Institution(s) where experiment was performed. + multivalued: false + range: text + required: false + keywords: + name: keywords + description: Terms to search over. + multivalued: false + array: + dimensions: + - alias: num_keywords + range: text + required: false + lab: + name: lab + description: Laboratory where experiment was performed. + multivalued: false + range: text + required: false + notes: + name: notes + description: Notes about the experiment. + multivalued: false + range: text + required: false + pharmacology: + name: pharmacology + description: Description of drugs used, including how and when they were administered. + Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc. + multivalued: false + range: text + required: false + protocol: + name: protocol + description: Experimental protocol, if applicable. e.g., include IACUC protocol + number. + multivalued: false + range: text + required: false + related_publications: + name: related_publications + description: Publication information. PMID, DOI, URL, etc. + multivalued: false + array: + dimensions: + - alias: num_publications + range: text + required: false + session_id: + name: session_id + description: Lab-specific ID for the session. + multivalued: false + range: text + required: false + slices: + name: slices + description: Description of slices, including information about preparation + thickness, orientation, temperature, and bath solution. 
+ multivalued: false + range: text + required: false + source_script: + name: source_script + description: Script file or link to public source code used to create this + NWB file. + multivalued: false + range: NWBFile__general__source_script + required: false + stimulus: + name: stimulus + description: Notes about stimuli, such as how and where they were presented. + multivalued: false + range: text + required: false + surgery: + name: surgery + description: Narrative description about surgery/surgeries, including date(s) + and who performed surgery. + multivalued: false + range: text + required: false + virus: + name: virus + description: Information about virus(es) used in experiments, including virus + ID, source, date made, injection location, volume, etc. + multivalued: false + range: text + required: false + lab_meta_data: + name: lab_meta_data + description: Place-holder than can be extended so that lab-specific meta-data + can be placed in /general. + multivalued: true + range: LabMetaData + required: false + devices: + name: devices + description: Description of hardware devices used during experiment, e.g., + monitors, ADC boards, microscopes, etc. + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Device + subject: + name: subject + description: Information about the animal or person from which the data was + measured. + multivalued: false + range: Subject + required: false + extracellular_ephys: + name: extracellular_ephys + description: Metadata related to extracellular electrophysiology. + multivalued: false + range: NWBFile__general__extracellular_ephys + required: false + intracellular_ephys: + name: intracellular_ephys + description: Metadata related to intracellular electrophysiology. + multivalued: false + range: NWBFile__general__intracellular_ephys + required: false + optogenetics: + name: optogenetics + description: Metadata describing optogenetic stimuluation. 
+ multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: OptogeneticStimulusSite + optophysiology: + name: optophysiology + description: Metadata related to optophysiology. + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ImagingPlane + NWBFile__general__source_script: + name: NWBFile__general__source_script + description: Script file or link to public source code used to create this NWB + file. + attributes: + name: + name: name + ifabsent: string(source_script) + identifier: true + range: string + required: true + equals_string: source_script + file_name: + name: file_name + description: Name of script file. + range: text + value: + name: value + range: text + required: true + NWBFile__general__extracellular_ephys: + name: NWBFile__general__extracellular_ephys + description: Metadata related to extracellular electrophysiology. + attributes: + name: + name: name + ifabsent: string(extracellular_ephys) + identifier: true + range: string + required: true + equals_string: extracellular_ephys + electrode_group: + name: electrode_group + description: Physical group of electrodes. + multivalued: true + range: ElectrodeGroup + required: false + electrodes: + name: electrodes + description: A table of all electrodes (i.e. channels) used for recording. + multivalued: false + range: NWBFile__general__extracellular_ephys__electrodes + required: false + NWBFile__general__extracellular_ephys__electrodes: + name: NWBFile__general__extracellular_ephys__electrodes + description: A table of all electrodes (i.e. channels) used for recording. + is_a: DynamicTable + attributes: + name: + name: name + ifabsent: string(electrodes) + identifier: true + range: string + required: true + equals_string: electrodes + x: + name: x + description: x coordinate of the channel location in the brain (+x is posterior). 
+ multivalued: true + range: float32 + y: + name: y + description: y coordinate of the channel location in the brain (+y is inferior). + multivalued: true + range: float32 + z: + name: z + description: z coordinate of the channel location in the brain (+z is right). + multivalued: true + range: float32 + imp: + name: imp + description: Impedance of the channel, in ohms. + multivalued: true + range: float32 + location: + name: location + description: Location of the electrode (channel). Specify the area, layer, + comments on estimation of area/layer, stereotaxic coordinates if in vivo, + etc. Use standard atlas names for anatomical regions when possible. + multivalued: true + range: text + filtering: + name: filtering + description: Description of hardware filtering, including the filter name + and frequency cutoffs. + multivalued: true + range: text + group: + name: group + description: Reference to the ElectrodeGroup this electrode is a part of. + multivalued: true + range: ElectrodeGroup + group_name: + name: group_name + description: Name of the ElectrodeGroup this electrode is a part of. + multivalued: true + range: text + rel_x: + name: rel_x + description: x coordinate in electrode group + multivalued: true + range: float32 + rel_y: + name: rel_y + description: y coordinate in electrode group + multivalued: true + range: float32 + rel_z: + name: rel_z + description: z coordinate in electrode group + multivalued: true + range: float32 + reference: + name: reference + description: Description of the reference electrode and/or reference scheme + used for this electrode, e.g., "stainless steel skull screw" or "online + common average referencing". + multivalued: true + range: text + NWBFile__general__intracellular_ephys: + name: NWBFile__general__intracellular_ephys + description: Metadata related to intracellular electrophysiology. 
+ attributes: + name: + name: name + ifabsent: string(intracellular_ephys) + identifier: true + range: string + required: true + equals_string: intracellular_ephys + filtering: + name: filtering + description: '[DEPRECATED] Use IntracellularElectrode.filtering instead. Description + of filtering used. Includes filtering type and parameters, frequency fall-off, + etc. If this changes between TimeSeries, filter description should be stored + as a text attribute for each TimeSeries.' + multivalued: false + range: text + required: false + intracellular_electrode: + name: intracellular_electrode + description: An intracellular electrode. + multivalued: true + range: IntracellularElectrode + required: false + sweep_table: + name: sweep_table + description: '[DEPRECATED] Table used to group different PatchClampSeries. + SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable + tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions + tables provide enhanced support for experiment metadata.' + multivalued: false + range: SweepTable + required: false + intracellular_recordings: + name: intracellular_recordings + description: A table to group together a stimulus and response from a single + electrode and a single simultaneous recording. Each row in the table represents + a single recording consisting typically of a stimulus and a corresponding + response. In some cases, however, only a stimulus or a response are recorded + as as part of an experiment. In this case both, the stimulus and response + will point to the same TimeSeries while the idx_start and count of the invalid + column will be set to -1, thus, indicating that no values have been recorded + for the stimulus or response, respectively. Note, a recording MUST contain + at least a stimulus or a response. Typically the stimulus and response are + PatchClampSeries. 
However, the use of AD/DA channels that are not associated + to an electrode is also common in intracellular electrophysiology, in which + case other TimeSeries may be used. + multivalued: false + range: IntracellularRecordingsTable + required: false + simultaneous_recordings: + name: simultaneous_recordings + description: A table for grouping different intracellular recordings from + the IntracellularRecordingsTable table together that were recorded simultaneously + from different electrodes + multivalued: false + range: SimultaneousRecordingsTable + required: false + sequential_recordings: + name: sequential_recordings + description: A table for grouping different sequential recordings from the + SimultaneousRecordingsTable table together. This is typically used to group + together sequential recordings where a sequence of stimuli of the same + type with varying parameters have been presented in a sequence. + multivalued: false + range: SequentialRecordingsTable + required: false + repetitions: + name: repetitions + description: A table for grouping different sequential intracellular recordings + together. With each SequentialRecording typically representing a particular + type of stimulus, the RepetitionsTable table is typically used to group + sets of stimuli applied in sequence. + multivalued: false + range: RepetitionsTable + required: false + experimental_conditions: + name: experimental_conditions + description: A table for grouping different intracellular recording repetitions + together that belong to the same experimental conditions. + multivalued: false + range: ExperimentalConditionsTable + required: false + LabMetaData: + name: LabMetaData + description: Lab-specific meta-data. + is_a: NWBContainer + attributes: + name: + name: name + identifier: true + range: string + required: true + tree_root: true + Subject: + name: Subject + description: Information about the animal or person from which the data was measured.
+ is_a: NWBContainer + attributes: + name: + name: name + identifier: true + range: string + required: true + age: + name: age + description: Age of subject. Can be supplied instead of 'date_of_birth'. + multivalued: false + range: Subject__age + required: false + date_of_birth: + name: date_of_birth + description: Date of birth of subject. Can be supplied instead of 'age'. + multivalued: false + range: isodatetime + required: false + description: + name: description + description: Description of subject and where subject came from (e.g., breeder, + if animal). + multivalued: false + range: text + required: false + genotype: + name: genotype + description: Genetic strain. If absent, assume Wild Type (WT). + multivalued: false + range: text + required: false + sex: + name: sex + description: Gender of subject. + multivalued: false + range: text + required: false + species: + name: species + description: Species of subject. + multivalued: false + range: text + required: false + strain: + name: strain + description: Strain of subject. + multivalued: false + range: text + required: false + subject_id: + name: subject_id + description: ID of animal/person used/participating in experiment (lab convention). + multivalued: false + range: text + required: false + weight: + name: weight + description: Weight at time of experiment, at time of surgery and at other + important times. + multivalued: false + range: text + required: false + tree_root: true + Subject__age: + name: Subject__age + description: Age of subject. Can be supplied instead of 'date_of_birth'. + attributes: + name: + name: name + ifabsent: string(age) + identifier: true + range: string + required: true + equals_string: age + reference: + name: reference + description: Age is with reference to this event. Can be 'birth' or 'gestational'. + If reference is omitted, 'birth' is implied. 
+ range: text + value: + name: value + range: text + required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml new file mode 100644 index 0000000..352aa82 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.icephys.yaml @@ -0,0 +1,967 @@ +name: core.nwb.icephys +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: core +id: core.nwb.icephys +version: 2.7.0 +imports: +- core.nwb.base +- core.nwb.device +- ../../hdmf_common/v1_8_0/namespace +- core.nwb.language +default_prefix: core.nwb.icephys/ +classes: + PatchClampSeries: + name: PatchClampSeries + description: An abstract base class for patch-clamp data - stimulus or response, + current or voltage. + is_a: TimeSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + stimulus_description: + name: stimulus_description + description: Protocol/stimulus name for this patch-clamp dataset. + range: text + sweep_number: + name: sweep_number + description: Sweep number, allows to group different PatchClampSeries together. + range: uint32 + data: + name: data + description: Recorded voltage or current. + multivalued: false + range: PatchClampSeries__data + required: true + gain: + name: gain + description: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt + (c-clamp). + multivalued: false + range: float32 + required: false + tree_root: true + PatchClampSeries__data: + name: PatchClampSeries__data + description: Recorded voltage or current. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. 
To access the data in + these units, multiply 'data' by 'conversion' and add 'offset'. + range: text + array: + name: array + array: + dimensions: + - alias: num_times + range: numeric + CurrentClampSeries: + name: CurrentClampSeries + description: Voltage data from an intracellular current-clamp recording. A corresponding + CurrentClampStimulusSeries (stored separately as a stimulus) is used to store + the current injected. + is_a: PatchClampSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + data: + name: data + description: Recorded voltage. + multivalued: false + range: CurrentClampSeries__data + required: true + bias_current: + name: bias_current + description: Bias current, in amps. + multivalued: false + range: float32 + required: false + bridge_balance: + name: bridge_balance + description: Bridge balance, in ohms. + multivalued: false + range: float32 + required: false + capacitance_compensation: + name: capacitance_compensation + description: Capacitance compensation, in farads. + multivalued: false + range: float32 + required: false + tree_root: true + CurrentClampSeries__data: + name: CurrentClampSeries__data + description: Recorded voltage. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + unit: + name: unit + description: Base unit of measurement for working with the data. which is + fixed to 'volts'. Actual stored values are not necessarily stored in these + units. To access the data in these units, multiply 'data' by 'conversion' + and add 'offset'. + range: text + value: + name: value + range: AnyType + required: true + IZeroClampSeries: + name: IZeroClampSeries + description: Voltage data from an intracellular recording when all current and + amplifier settings are off (i.e., CurrentClampSeries fields will be zero). 
There + is no CurrentClampStimulusSeries associated with an IZero series because the + amplifier is disconnected and no stimulus can reach the cell. + is_a: CurrentClampSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + stimulus_description: + name: stimulus_description + description: An IZeroClampSeries has no stimulus, so this attribute is automatically + set to "N/A" + range: text + bias_current: + name: bias_current + description: Bias current, in amps, fixed to 0.0. + multivalued: false + range: float32 + required: true + bridge_balance: + name: bridge_balance + description: Bridge balance, in ohms, fixed to 0.0. + multivalued: false + range: float32 + required: true + capacitance_compensation: + name: capacitance_compensation + description: Capacitance compensation, in farads, fixed to 0.0. + multivalued: false + range: float32 + required: true + tree_root: true + CurrentClampStimulusSeries: + name: CurrentClampStimulusSeries + description: Stimulus current applied during current clamp recording. + is_a: PatchClampSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + data: + name: data + description: Stimulus current applied. + multivalued: false + range: CurrentClampStimulusSeries__data + required: true + tree_root: true + CurrentClampStimulusSeries__data: + name: CurrentClampStimulusSeries__data + description: Stimulus current applied. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + unit: + name: unit + description: Base unit of measurement for working with the data. which is + fixed to 'amperes'. Actual stored values are not necessarily stored in these + units. To access the data in these units, multiply 'data' by 'conversion' + and add 'offset'. 
+ range: text + value: + name: value + range: AnyType + required: true + VoltageClampSeries: + name: VoltageClampSeries + description: Current data from an intracellular voltage-clamp recording. A corresponding + VoltageClampStimulusSeries (stored separately as a stimulus) is used to store + the voltage injected. + is_a: PatchClampSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + data: + name: data + description: Recorded current. + multivalued: false + range: VoltageClampSeries__data + required: true + capacitance_fast: + name: capacitance_fast + description: Fast capacitance, in farads. + multivalued: false + range: VoltageClampSeries__capacitance_fast + required: false + capacitance_slow: + name: capacitance_slow + description: Slow capacitance, in farads. + multivalued: false + range: VoltageClampSeries__capacitance_slow + required: false + resistance_comp_bandwidth: + name: resistance_comp_bandwidth + description: Resistance compensation bandwidth, in hertz. + multivalued: false + range: VoltageClampSeries__resistance_comp_bandwidth + required: false + resistance_comp_correction: + name: resistance_comp_correction + description: Resistance compensation correction, in percent. + multivalued: false + range: VoltageClampSeries__resistance_comp_correction + required: false + resistance_comp_prediction: + name: resistance_comp_prediction + description: Resistance compensation prediction, in percent. + multivalued: false + range: VoltageClampSeries__resistance_comp_prediction + required: false + whole_cell_capacitance_comp: + name: whole_cell_capacitance_comp + description: Whole cell capacitance compensation, in farads. + multivalued: false + range: VoltageClampSeries__whole_cell_capacitance_comp + required: false + whole_cell_series_resistance_comp: + name: whole_cell_series_resistance_comp + description: Whole cell series resistance compensation, in ohms. 
+ multivalued: false + range: VoltageClampSeries__whole_cell_series_resistance_comp + required: false + tree_root: true + VoltageClampSeries__data: + name: VoltageClampSeries__data + description: Recorded current. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + unit: + name: unit + description: Base unit of measurement for working with the data. which is + fixed to 'amperes'. Actual stored values are not necessarily stored in these + units. To access the data in these units, multiply 'data' by 'conversion' + and add 'offset'. + range: text + value: + name: value + range: AnyType + required: true + VoltageClampSeries__capacitance_fast: + name: VoltageClampSeries__capacitance_fast + description: Fast capacitance, in farads. + attributes: + name: + name: name + ifabsent: string(capacitance_fast) + identifier: true + range: string + required: true + equals_string: capacitance_fast + unit: + name: unit + description: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + range: text + value: + name: value + range: float32 + required: true + VoltageClampSeries__capacitance_slow: + name: VoltageClampSeries__capacitance_slow + description: Slow capacitance, in farads. + attributes: + name: + name: name + ifabsent: string(capacitance_slow) + identifier: true + range: string + required: true + equals_string: capacitance_slow + unit: + name: unit + description: Unit of measurement for capacitance_slow, which is fixed to 'farads'. + range: text + value: + name: value + range: float32 + required: true + VoltageClampSeries__resistance_comp_bandwidth: + name: VoltageClampSeries__resistance_comp_bandwidth + description: Resistance compensation bandwidth, in hertz.
+ attributes: + name: + name: name + ifabsent: string(resistance_comp_bandwidth) + identifier: true + range: string + required: true + equals_string: resistance_comp_bandwidth + unit: + name: unit + description: Unit of measurement for resistance_comp_bandwidth, which is fixed + to 'hertz'. + range: text + value: + name: value + range: float32 + required: true + VoltageClampSeries__resistance_comp_correction: + name: VoltageClampSeries__resistance_comp_correction + description: Resistance compensation correction, in percent. + attributes: + name: + name: name + ifabsent: string(resistance_comp_correction) + identifier: true + range: string + required: true + equals_string: resistance_comp_correction + unit: + name: unit + description: Unit of measurement for resistance_comp_correction, which is + fixed to 'percent'. + range: text + value: + name: value + range: float32 + required: true + VoltageClampSeries__resistance_comp_prediction: + name: VoltageClampSeries__resistance_comp_prediction + description: Resistance compensation prediction, in percent. + attributes: + name: + name: name + ifabsent: string(resistance_comp_prediction) + identifier: true + range: string + required: true + equals_string: resistance_comp_prediction + unit: + name: unit + description: Unit of measurement for resistance_comp_prediction, which is + fixed to 'percent'. + range: text + value: + name: value + range: float32 + required: true + VoltageClampSeries__whole_cell_capacitance_comp: + name: VoltageClampSeries__whole_cell_capacitance_comp + description: Whole cell capacitance compensation, in farads. + attributes: + name: + name: name + ifabsent: string(whole_cell_capacitance_comp) + identifier: true + range: string + required: true + equals_string: whole_cell_capacitance_comp + unit: + name: unit + description: Unit of measurement for whole_cell_capacitance_comp, which is + fixed to 'farads'. 
+ range: text + value: + name: value + range: float32 + required: true + VoltageClampSeries__whole_cell_series_resistance_comp: + name: VoltageClampSeries__whole_cell_series_resistance_comp + description: Whole cell series resistance compensation, in ohms. + attributes: + name: + name: name + ifabsent: string(whole_cell_series_resistance_comp) + identifier: true + range: string + required: true + equals_string: whole_cell_series_resistance_comp + unit: + name: unit + description: Unit of measurement for whole_cell_series_resistance_comp, which + is fixed to 'ohms'. + range: text + value: + name: value + range: float32 + required: true + VoltageClampStimulusSeries: + name: VoltageClampStimulusSeries + description: Stimulus voltage applied during a voltage clamp recording. + is_a: PatchClampSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + data: + name: data + description: Stimulus voltage applied. + multivalued: false + range: VoltageClampStimulusSeries__data + required: true + tree_root: true + VoltageClampStimulusSeries__data: + name: VoltageClampStimulusSeries__data + description: Stimulus voltage applied. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + unit: + name: unit + description: Base unit of measurement for working with the data. which is + fixed to 'volts'. Actual stored values are not necessarily stored in these + units. To access the data in these units, multiply 'data' by 'conversion' + and add 'offset'. + range: text + value: + name: value + range: AnyType + required: true + IntracellularElectrode: + name: IntracellularElectrode + description: An intracellular electrode and its metadata. 
+ is_a: NWBContainer + attributes: + name: + name: name + identifier: true + range: string + required: true + cell_id: + name: cell_id + description: unique ID of the cell + multivalued: false + range: text + required: false + description: + name: description + description: Description of electrode (e.g., whole-cell, sharp, etc.). + multivalued: false + range: text + required: true + filtering: + name: filtering + description: Electrode specific filtering. + multivalued: false + range: text + required: false + initial_access_resistance: + name: initial_access_resistance + description: Initial access resistance. + multivalued: false + range: text + required: false + location: + name: location + description: Location of the electrode. Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use + standard atlas names for anatomical regions when possible. + multivalued: false + range: text + required: false + resistance: + name: resistance + description: Electrode resistance, in ohms. + multivalued: false + range: text + required: false + seal: + name: seal + description: Information about seal used for recording. + multivalued: false + range: text + required: false + slice: + name: slice + description: Information about slice used for recording. + multivalued: false + range: text + required: false + tree_root: true + SweepTable: + name: SweepTable + description: '[DEPRECATED] Table used to group different PatchClampSeries. SweepTable + is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable + tables. Additional SequentialRecordingsTable, RepetitionsTable, and ExperimentalConditions + tables provide enhanced support for experiment metadata.' + is_a: DynamicTable + attributes: + name: + name: name + identifier: true + range: string + required: true + sweep_number: + name: sweep_number + description: Sweep number of the PatchClampSeries in that row. 
+ multivalued: true + range: uint32 + series: + name: series + description: The PatchClampSeries with the sweep number in that row. + multivalued: true + range: PatchClampSeries + series_index: + name: series_index + description: Index for series. + multivalued: false + range: SweepTable__series_index + required: true + tree_root: true + SweepTable__series_index: + name: SweepTable__series_index + description: Index for series. + is_a: VectorIndex + attributes: + name: + name: name + ifabsent: string(series_index) + identifier: true + range: string + required: true + equals_string: series_index + IntracellularElectrodesTable: + name: IntracellularElectrodesTable + description: Table for storing intracellular electrode related metadata. + is_a: DynamicTable + attributes: + name: + name: name + identifier: true + range: string + required: true + description: + name: description + description: Description of what is in this dynamic table. + range: text + electrode: + name: electrode + description: Column for storing the reference to the intracellular electrode. + multivalued: true + range: IntracellularElectrode + tree_root: true + IntracellularStimuliTable: + name: IntracellularStimuliTable + description: Table for storing intracellular stimulus related metadata. + is_a: DynamicTable + attributes: + name: + name: name + identifier: true + range: string + required: true + description: + name: description + description: Description of what is in this dynamic table. + range: text + stimulus: + name: stimulus + description: Column storing the reference to the recorded stimulus for the + recording (rows). + multivalued: false + range: IntracellularStimuliTable__stimulus + required: true + stimulus_template: + name: stimulus_template + description: Column storing the reference to the stimulus template for the + recording (rows). 
+ multivalued: false + range: IntracellularStimuliTable__stimulus_template + required: false + tree_root: true + IntracellularStimuliTable__stimulus: + name: IntracellularStimuliTable__stimulus + description: Column storing the reference to the recorded stimulus for the recording + (rows). + is_a: TimeSeriesReferenceVectorData + attributes: + name: + name: name + ifabsent: string(stimulus) + identifier: true + range: string + required: true + equals_string: stimulus + IntracellularStimuliTable__stimulus_template: + name: IntracellularStimuliTable__stimulus_template + description: Column storing the reference to the stimulus template for the recording + (rows). + is_a: TimeSeriesReferenceVectorData + attributes: + name: + name: name + ifabsent: string(stimulus_template) + identifier: true + range: string + required: true + equals_string: stimulus_template + IntracellularResponsesTable: + name: IntracellularResponsesTable + description: Table for storing intracellular response related metadata. + is_a: DynamicTable + attributes: + name: + name: name + identifier: true + range: string + required: true + description: + name: description + description: Description of what is in this dynamic table. 
+ range: text + response: + name: response + description: Column storing the reference to the recorded response for the + recording (rows) + multivalued: false + range: IntracellularResponsesTable__response + required: true + tree_root: true + IntracellularResponsesTable__response: + name: IntracellularResponsesTable__response + description: Column storing the reference to the recorded response for the recording + (rows) + is_a: TimeSeriesReferenceVectorData + attributes: + name: + name: name + ifabsent: string(response) + identifier: true + range: string + required: true + equals_string: response + IntracellularRecordingsTable: + name: IntracellularRecordingsTable + description: A table to group together a stimulus and response from a single electrode + and a single simultaneous recording. Each row in the table represents a single + recording consisting typically of a stimulus and a corresponding response. In + some cases, however, only a stimulus or a response is recorded as part of an + experiment. In this case, both the stimulus and response will point to the same + TimeSeries while the idx_start and count of the invalid column will be set to + -1, thus, indicating that no values have been recorded for the stimulus or response, + respectively. Note, a recording MUST contain at least a stimulus or a response. + Typically the stimulus and response are PatchClampSeries. However, the use of + AD/DA channels that are not associated to an electrode is also common in intracellular + electrophysiology, in which case other TimeSeries may be used. + is_a: AlignedDynamicTable + attributes: + name: + name: name + ifabsent: string(intracellular_recordings) + identifier: true + range: string + required: true + equals_string: intracellular_recordings + description: + name: description + description: Description of the contents of this table. Inherited from AlignedDynamicTable + and overwritten here to fix the value of the attribute. 
+ range: text + electrodes: + name: electrodes + description: Table for storing intracellular electrode related metadata. + multivalued: false + range: IntracellularElectrodesTable + required: true + stimuli: + name: stimuli + description: Table for storing intracellular stimulus related metadata. + multivalued: false + range: IntracellularStimuliTable + required: true + responses: + name: responses + description: Table for storing intracellular response related metadata. + multivalued: false + range: IntracellularResponsesTable + required: true + tree_root: true + SimultaneousRecordingsTable: + name: SimultaneousRecordingsTable + description: A table for grouping different intracellular recordings from the + IntracellularRecordingsTable table together that were recorded simultaneously + from different electrodes. + is_a: DynamicTable + attributes: + name: + name: name + ifabsent: string(simultaneous_recordings) + identifier: true + range: string + required: true + equals_string: simultaneous_recordings + recordings: + name: recordings + description: A reference to one or more rows in the IntracellularRecordingsTable + table. + multivalued: false + range: SimultaneousRecordingsTable__recordings + required: true + recordings_index: + name: recordings_index + description: Index dataset for the recordings column. + multivalued: false + range: SimultaneousRecordingsTable__recordings_index + required: true + tree_root: true + SimultaneousRecordingsTable__recordings: + name: SimultaneousRecordingsTable__recordings + description: A reference to one or more rows in the IntracellularRecordingsTable + table. + is_a: DynamicTableRegion + attributes: + name: + name: name + ifabsent: string(recordings) + identifier: true + range: string + required: true + equals_string: recordings + table: + name: table + description: Reference to the IntracellularRecordingsTable table that this + table region applies to. 
This specializes the attribute inherited from DynamicTableRegion + to fix the type of table that can be referenced here. + range: IntracellularRecordingsTable + SimultaneousRecordingsTable__recordings_index: + name: SimultaneousRecordingsTable__recordings_index + description: Index dataset for the recordings column. + is_a: VectorIndex + attributes: + name: + name: name + ifabsent: string(recordings_index) + identifier: true + range: string + required: true + equals_string: recordings_index + SequentialRecordingsTable: + name: SequentialRecordingsTable + description: A table for grouping different sequential recordings from the SimultaneousRecordingsTable + table together. This is typically used to group together sequential recordings + where a sequence of stimuli of the same type with varying parameters have been + presented in a sequence. + is_a: DynamicTable + attributes: + name: + name: name + ifabsent: string(sequential_recordings) + identifier: true + range: string + required: true + equals_string: sequential_recordings + simultaneous_recordings: + name: simultaneous_recordings + description: A reference to one or more rows in the SimultaneousRecordingsTable + table. + multivalued: false + range: SequentialRecordingsTable__simultaneous_recordings + required: true + simultaneous_recordings_index: + name: simultaneous_recordings_index + description: Index dataset for the simultaneous_recordings column. + multivalued: false + range: SequentialRecordingsTable__simultaneous_recordings_index + required: true + stimulus_type: + name: stimulus_type + description: The type of stimulus used for the sequential recording. + multivalued: true + range: text + tree_root: true + SequentialRecordingsTable__simultaneous_recordings: + name: SequentialRecordingsTable__simultaneous_recordings + description: A reference to one or more rows in the SimultaneousRecordingsTable + table. 
+ is_a: DynamicTableRegion + attributes: + name: + name: name + ifabsent: string(simultaneous_recordings) + identifier: true + range: string + required: true + equals_string: simultaneous_recordings + table: + name: table + description: Reference to the SimultaneousRecordingsTable table that this + table region applies to. This specializes the attribute inherited from DynamicTableRegion + to fix the type of table that can be referenced here. + range: SimultaneousRecordingsTable + SequentialRecordingsTable__simultaneous_recordings_index: + name: SequentialRecordingsTable__simultaneous_recordings_index + description: Index dataset for the simultaneous_recordings column. + is_a: VectorIndex + attributes: + name: + name: name + ifabsent: string(simultaneous_recordings_index) + identifier: true + range: string + required: true + equals_string: simultaneous_recordings_index + RepetitionsTable: + name: RepetitionsTable + description: A table for grouping different sequential intracellular recordings + together. With each SequentialRecording typically representing a particular + type of stimulus, the RepetitionsTable table is typically used to group sets + of stimuli applied in sequence. + is_a: DynamicTable + attributes: + name: + name: name + ifabsent: string(repetitions) + identifier: true + range: string + required: true + equals_string: repetitions + sequential_recordings: + name: sequential_recordings + description: A reference to one or more rows in the SequentialRecordingsTable + table. + multivalued: false + range: RepetitionsTable__sequential_recordings + required: true + sequential_recordings_index: + name: sequential_recordings_index + description: Index dataset for the sequential_recordings column. 
+ multivalued: false + range: RepetitionsTable__sequential_recordings_index + required: true + tree_root: true + RepetitionsTable__sequential_recordings: + name: RepetitionsTable__sequential_recordings + description: A reference to one or more rows in the SequentialRecordingsTable + table. + is_a: DynamicTableRegion + attributes: + name: + name: name + ifabsent: string(sequential_recordings) + identifier: true + range: string + required: true + equals_string: sequential_recordings + table: + name: table + description: Reference to the SequentialRecordingsTable table that this table + region applies to. This specializes the attribute inherited from DynamicTableRegion + to fix the type of table that can be referenced here. + range: SequentialRecordingsTable + RepetitionsTable__sequential_recordings_index: + name: RepetitionsTable__sequential_recordings_index + description: Index dataset for the sequential_recordings column. + is_a: VectorIndex + attributes: + name: + name: name + ifabsent: string(sequential_recordings_index) + identifier: true + range: string + required: true + equals_string: sequential_recordings_index + ExperimentalConditionsTable: + name: ExperimentalConditionsTable + description: A table for grouping different intracellular recording repetitions + together that belong to the same experimental condition. + is_a: DynamicTable + attributes: + name: + name: name + ifabsent: string(experimental_conditions) + identifier: true + range: string + required: true + equals_string: experimental_conditions + repetitions: + name: repetitions + description: A reference to one or more rows in the RepetitionsTable table. + multivalued: false + range: ExperimentalConditionsTable__repetitions + required: true + repetitions_index: + name: repetitions_index + description: Index dataset for the repetitions column. 
+ multivalued: false + range: ExperimentalConditionsTable__repetitions_index + required: true + tree_root: true + ExperimentalConditionsTable__repetitions: + name: ExperimentalConditionsTable__repetitions + description: A reference to one or more rows in the RepetitionsTable table. + is_a: DynamicTableRegion + attributes: + name: + name: name + ifabsent: string(repetitions) + identifier: true + range: string + required: true + equals_string: repetitions + table: + name: table + description: Reference to the RepetitionsTable table that this table region + applies to. This specializes the attribute inherited from DynamicTableRegion + to fix the type of table that can be referenced here. + range: RepetitionsTable + ExperimentalConditionsTable__repetitions_index: + name: ExperimentalConditionsTable__repetitions_index + description: Index dataset for the repetitions column. + is_a: VectorIndex + attributes: + name: + name: name + ifabsent: string(repetitions_index) + identifier: true + range: string + required: true + equals_string: repetitions_index diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml new file mode 100644 index 0000000..ddf68aa --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.image.yaml @@ -0,0 +1,251 @@ +name: core.nwb.image +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: core +id: core.nwb.image +version: 2.7.0 +imports: +- core.nwb.base +- core.nwb.device +- core.nwb.language +default_prefix: core.nwb.image/ +classes: + GrayscaleImage: + name: GrayscaleImage + description: A grayscale image. + is_a: Image + attributes: + name: + name: name + identifier: true + range: string + required: true + tree_root: true + RGBImage: + name: RGBImage + description: A color image. 
+ is_a: Image + attributes: + name: + name: name + identifier: true + range: string + required: true + tree_root: true + RGBAImage: + name: RGBAImage + description: A color image with transparency. + is_a: Image + attributes: + name: + name: name + identifier: true + range: string + required: true + tree_root: true + ImageSeries: + name: ImageSeries + description: General image data that is common between acquisition and stimulus + time series. Sometimes the image data is stored in the file in a raw format + while other times it will be stored as a series of external image files in the + host file system. The data field will either be binary data, if the data is + stored in the NWB file, or empty, if the data is stored in an external image + stack. [frame][x][y] or [frame][x][y][z]. + is_a: TimeSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + data: + name: data + description: Binary data representing images across frames. If data are stored + in an external file, this should be an empty 3D array. + multivalued: false + range: numeric + required: true + any_of: + - array: + dimensions: + - alias: frame + - alias: x + - alias: y + - array: + dimensions: + - alias: frame + - alias: x + - alias: y + - alias: z + dimension: + name: dimension + description: Number of pixels on x, y, (and z) axes. + multivalued: false + array: + dimensions: + - alias: rank + range: int32 + required: false + external_file: + name: external_file + description: Paths to one or more external file(s). The field is only present + if format='external'. This is only relevant if the image series is stored + in the file system as one or more image file(s). This field should NOT be + used if the image is stored in another NWB file and that file is linked + to this file. + multivalued: false + range: ImageSeries__external_file + required: false + format: + name: format + description: Format of image. 
If this is 'external', then the attribute 'external_file' + contains the path information to the image files. If this is 'raw', then + the raw (single-channel) binary data is stored in the 'data' dataset. If + this attribute is not present, then the default format='raw' case is assumed. + multivalued: false + range: text + required: false + tree_root: true + ImageSeries__external_file: + name: ImageSeries__external_file + description: Paths to one or more external file(s). The field is only present + if format='external'. This is only relevant if the image series is stored in + the file system as one or more image file(s). This field should NOT be used + if the image is stored in another NWB file and that file is linked to this file. + attributes: + name: + name: name + ifabsent: string(external_file) + identifier: true + range: string + required: true + equals_string: external_file + starting_frame: + name: starting_frame + description: Each external image may contain one or more consecutive frames + of the full ImageSeries. This attribute serves as an index to indicate which + frames each file contains, to facilitate random access. The 'starting_frame' + attribute, hence, contains a list of frame numbers within the full ImageSeries + of the first frame of each file listed in the parent 'external_file' dataset. + Zero-based indexing is used (hence, the first element will always be zero). + For example, if the 'external_file' dataset has three paths to files and + the first file has 5 frames, the second file has 10 frames, and the third + file has 20 frames, then this attribute will have values [0, 5, 15]. If + there is a single external file that holds all of the frames of the ImageSeries + (and so there is a single element in the 'external_file' dataset), then + this attribute should have value [0]. 
+ range: int32 + array: + name: array + array: + dimensions: + - alias: num_files + range: text + ImageMaskSeries: + name: ImageMaskSeries + description: An alpha mask that is applied to a presented visual stimulus. The + 'data' array contains an array of mask values that are applied to the displayed + image. Mask values are stored as RGBA. Mask can vary with time. The timestamps + array indicates the starting time of a mask, and that mask pattern continues + until it's explicitly changed. + is_a: ImageSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + tree_root: true + OpticalSeries: + name: OpticalSeries + description: Image data that is presented or recorded. A stimulus template movie + will be stored only as an image. When the image is presented as stimulus, additional + data is required, such as field of view (e.g., how much of the visual field + the image covers, or how what is the area of the target being imaged). If the + OpticalSeries represents acquired imaging data, orientation is also important. + is_a: ImageSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + distance: + name: distance + description: Distance from camera/monitor to target/eye. + multivalued: false + range: float32 + required: false + field_of_view: + name: field_of_view + description: Width, height and depth of image, or imaged area, in meters. 
+ multivalued: false + range: float32 + required: false + any_of: + - array: + dimensions: + - alias: width, height + exact_cardinality: 2 + - array: + dimensions: + - alias: width, height, depth + exact_cardinality: 3 + data: + name: data + description: Images presented to subject, either grayscale or RGB + multivalued: false + range: numeric + required: true + any_of: + - array: + dimensions: + - alias: frame + - alias: x + - alias: y + - array: + dimensions: + - alias: frame + - alias: x + - alias: y + - alias: r, g, b + exact_cardinality: 3 + orientation: + name: orientation + description: Description of image relative to some reference frame (e.g., + which way is up). Must also specify frame of reference. + multivalued: false + range: text + required: false + tree_root: true + IndexSeries: + name: IndexSeries + description: Stores indices to image frames stored in an ImageSeries. The purpose + of the IndexSeries is to allow a static image stack to be stored in an Images + object, and the images in the stack to be referenced out-of-order. This can + be for the display of individual images, or of movie segments (as a movie is + simply a series of images). The data field stores the index of the frame in + the referenced Images object, and the timestamps array indicates when that image + was displayed. + is_a: TimeSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + data: + name: data + description: Index of the image (using zero-indexing) in the linked Images + object. 
+ multivalued: false + array: + dimensions: + - alias: num_times + range: uint32 + required: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.language.yaml new file mode 100644 index 0000000..929628d --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.language.yaml @@ -0,0 +1,146 @@ +name: core.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: core +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + float64: + name: float64 + typeof: double + long: + name: long + typeof: integer + int64: + name: int64 + typeof: integer + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + int16: + name: int16 + typeof: integer + short: + name: short + typeof: integer + int8: + name: int8 + typeof: integer + uint: + name: uint + typeof: integer + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + minimum_value: 0 + numeric: + name: numeric + typeof: float + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime +enums: + FlatDType: + name: FlatDType + permissible_values: + float: + text: float + float32: + text: float32 + double: + text: double + float64: + text: float64 + 
long: + text: long + int64: + text: int64 + int: + text: int + int32: + text: int32 + int16: + text: int16 + short: + text: short + int8: + text: int8 + uint: + text: uint + uint32: + text: uint32 + uint16: + text: uint16 + uint8: + text: uint8 + uint64: + text: uint64 + numeric: + text: numeric + text: + text: text + utf: + text: utf + utf8: + text: utf8 + utf_8: + text: utf_8 + ascii: + text: ascii + bool: + text: bool + isodatetime: + text: isodatetime +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml new file mode 100644 index 0000000..ac63cad --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.misc.yaml @@ -0,0 +1,491 @@ +name: core.nwb.misc +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: core +id: core.nwb.misc +version: 2.7.0 +imports: +- core.nwb.base +- ../../hdmf_common/v1_8_0/namespace +- core.nwb.ecephys +- core.nwb.language +default_prefix: core.nwb.misc/ +classes: + AbstractFeatureSeries: + name: AbstractFeatureSeries + description: Abstract features, such as quantitative descriptions of sensory stimuli. + The TimeSeries::data field is a 2D array, storing those features (e.g., for + visual grating stimulus this might be orientation, spatial frequency and contrast). + Null stimuli (eg, uniform gray) can be marked as being an independent feature + (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, + or through use of the TimeSeries::control fields. A set of features is considered + to persist until the next set of features is defined. The final set of features + stored should be the null set. This is useful when storing the raw stimulus + is impractical. 
+ is_a: TimeSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + data: + name: data + description: Values of each feature at each time. + multivalued: false + range: AbstractFeatureSeries__data + required: true + feature_units: + name: feature_units + description: Units of each feature. + multivalued: false + array: + dimensions: + - alias: num_features + range: text + required: false + features: + name: features + description: Description of the features represented in TimeSeries::data. + multivalued: false + array: + dimensions: + - alias: num_features + range: text + required: true + tree_root: true + AbstractFeatureSeries__data: + name: AbstractFeatureSeries__data + description: Values of each feature at each time. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + unit: + name: unit + description: Since there can be different units for different features, store + the units in 'feature_units'. The default value for this attribute is "see + 'feature_units'". + range: text + array: + name: array + range: numeric + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_features + AnnotationSeries: + name: AnnotationSeries + description: Stores user annotations made during an experiment. The data[] field + stores a text array, and timestamps are stored for each annotation (ie, interval=1). + This is largely an alias to a standard TimeSeries storing a text array but that + is identifiable as storing annotations in a machine-readable way. + is_a: TimeSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + data: + name: data + description: Annotations made during an experiment. 
+ multivalued: false + array: + dimensions: + - alias: num_times + range: text + required: true + tree_root: true + IntervalSeries: + name: IntervalSeries + description: Stores intervals of data. The timestamps field stores the beginning + and end of intervals. The data field stores whether the interval just started + (>0 value) or ended (<0 value). Different interval types can be represented + in the same series by using multiple key values (eg, 1 for feature A, 2 for + feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This + is largely an alias of a standard TimeSeries but that is identifiable as representing + time intervals in a machine-readable way. + is_a: TimeSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + data: + name: data + description: Use values >0 if interval started, <0 if interval ended. + multivalued: false + array: + dimensions: + - alias: num_times + range: int8 + required: true + tree_root: true + DecompositionSeries: + name: DecompositionSeries + description: Spectral analysis of a time series, e.g. of an LFP or a speech signal. + is_a: TimeSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + data: + name: data + description: Data decomposed into frequency bands. + multivalued: false + range: DecompositionSeries__data + required: true + metric: + name: metric + description: The metric used, e.g. phase, amplitude, power. + multivalued: false + range: text + required: true + source_channels: + name: source_channels + description: DynamicTableRegion pointer to the channels that this decomposition + series was generated from. + multivalued: false + range: DecompositionSeries__source_channels + required: false + bands: + name: bands + description: Table for describing the bands that this series was generated + from. There should be one row in this table for each band. 
+ multivalued: false + range: DecompositionSeries__bands + required: true + tree_root: true + DecompositionSeries__data: + name: DecompositionSeries__data + description: Data decomposed into frequency bands. + attributes: + name: + name: name + ifabsent: string(data) + identifier: true + range: string + required: true + equals_string: data + unit: + name: unit + description: Base unit of measurement for working with the data. Actual stored + values are not necessarily stored in these units. To access the data in + these units, multiply 'data' by 'conversion'. + range: text + array: + name: array + array: + dimensions: + - alias: num_times + - alias: num_channels + - alias: num_bands + range: numeric + DecompositionSeries__source_channels: + name: DecompositionSeries__source_channels + description: DynamicTableRegion pointer to the channels that this decomposition + series was generated from. + is_a: DynamicTableRegion + attributes: + name: + name: name + ifabsent: string(source_channels) + identifier: true + range: string + required: true + equals_string: source_channels + DecompositionSeries__bands: + name: DecompositionSeries__bands + description: Table for describing the bands that this series was generated from. + There should be one row in this table for each band. + is_a: DynamicTable + attributes: + name: + name: name + ifabsent: string(bands) + identifier: true + range: string + required: true + equals_string: bands + band_name: + name: band_name + description: Name of the band, e.g. theta. + multivalued: true + range: text + band_limits: + name: band_limits + description: Low and high limit of each band in Hz. If it is a Gaussian filter, + use 2 SD on either side of the center. + multivalued: false + array: + dimensions: + - alias: num_bands + - alias: low, high + exact_cardinality: 2 + range: float32 + required: true + band_mean: + name: band_mean + description: The mean Gaussian filters, in Hz. 
+ multivalued: false + array: + dimensions: + - alias: num_bands + range: float32 + required: true + band_stdev: + name: band_stdev + description: The standard deviation of Gaussian filters, in Hz. + multivalued: false + array: + dimensions: + - alias: num_bands + range: float32 + required: true + Units: + name: Units + description: Data about spiking units. Event times of observed units (e.g. cell, + synapse, etc.) should be concatenated and stored in spike_times. + is_a: DynamicTable + attributes: + name: + name: name + ifabsent: string(Units) + identifier: true + range: string + required: true + spike_times_index: + name: spike_times_index + description: Index into the spike_times dataset. + multivalued: false + range: Units__spike_times_index + required: false + spike_times: + name: spike_times + description: Spike times for each unit in seconds. + multivalued: false + range: Units__spike_times + required: false + obs_intervals_index: + name: obs_intervals_index + description: Index into the obs_intervals dataset. + multivalued: false + range: Units__obs_intervals_index + required: false + obs_intervals: + name: obs_intervals + description: Observation intervals for each unit. + multivalued: false + array: + dimensions: + - alias: num_intervals + - alias: start|end + exact_cardinality: 2 + range: float64 + required: false + electrodes_index: + name: electrodes_index + description: Index into electrodes. + multivalued: false + range: Units__electrodes_index + required: false + electrodes: + name: electrodes + description: Electrode that each spike unit came from, specified using a DynamicTableRegion. + multivalued: false + range: Units__electrodes + required: false + electrode_group: + name: electrode_group + description: Electrode group that each spike unit came from. + multivalued: true + range: ElectrodeGroup + waveform_mean: + name: waveform_mean + description: Spike waveform mean for each spike unit. 
+ multivalued: false + range: float32 + required: false + any_of: + - array: + dimensions: + - alias: num_units + - alias: num_samples + - array: + dimensions: + - alias: num_units + - alias: num_samples + - alias: num_electrodes + waveform_sd: + name: waveform_sd + description: Spike waveform standard deviation for each spike unit. + multivalued: false + range: float32 + required: false + any_of: + - array: + dimensions: + - alias: num_units + - alias: num_samples + - array: + dimensions: + - alias: num_units + - alias: num_samples + - alias: num_electrodes + waveforms: + name: waveforms + description: Individual waveforms for each spike on each electrode. This is + a doubly indexed column. The 'waveforms_index' column indexes which waveforms + in this column belong to the same spike event for a given unit, where each + waveform was recorded from a different electrode. The 'waveforms_index_index' + column indexes the 'waveforms_index' column to indicate which spike events + belong to a given unit. For example, if the 'waveforms_index_index' column + has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' + column correspond to the 2 spike events of the first unit, the next 3 elements + of the 'waveforms_index' column correspond to the 3 spike events of the + second unit, and the next 1 element of the 'waveforms_index' column corresponds + to the 1 spike event of the third unit. If the 'waveforms_index' column + has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' + column contain the 3 spike waveforms that were recorded from 3 different + electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays + for a graphical representation of this example. 
When there is only one electrode + for each unit (i.e., each spike time is associated with a single waveform), + then the 'waveforms_index' column will have values 1, 2, ..., N, where N + is the number of spike events. The number of electrodes for each spike event + should be the same within a given unit. The 'electrodes' column should be + used to indicate which electrodes are associated with each unit, and the + order of the waveforms within a given unit x spike event should be in the + same order as the electrodes referenced in the 'electrodes' column of this + table. The number of samples for each waveform must be the same. + multivalued: false + array: + dimensions: + - alias: num_waveforms + - alias: num_samples + range: numeric + required: false + waveforms_index: + name: waveforms_index + description: Index into the waveforms dataset. One value for every spike event. + See 'waveforms' for more detail. + multivalued: false + range: Units__waveforms_index + required: false + waveforms_index_index: + name: waveforms_index_index + description: Index into the waveforms_index dataset. One value for every unit + (row in the table). See 'waveforms' for more detail. + multivalued: false + range: Units__waveforms_index_index + required: false + tree_root: true + Units__spike_times_index: + name: Units__spike_times_index + description: Index into the spike_times dataset. + is_a: VectorIndex + attributes: + name: + name: name + ifabsent: string(spike_times_index) + identifier: true + range: string + required: true + equals_string: spike_times_index + Units__spike_times: + name: Units__spike_times + description: Spike times for each unit in seconds. + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(spike_times) + identifier: true + range: string + required: true + equals_string: spike_times + resolution: + name: resolution + description: The smallest possible difference between two spike times. 
Usually + 1 divided by the acquisition sampling rate from which spike times were extracted, + but could be larger if the acquisition time series was downsampled or smaller + if the acquisition time series was smoothed/interpolated and it is possible + for the spike time to be between samples. + range: float64 + Units__obs_intervals_index: + name: Units__obs_intervals_index + description: Index into the obs_intervals dataset. + is_a: VectorIndex + attributes: + name: + name: name + ifabsent: string(obs_intervals_index) + identifier: true + range: string + required: true + equals_string: obs_intervals_index + Units__electrodes_index: + name: Units__electrodes_index + description: Index into electrodes. + is_a: VectorIndex + attributes: + name: + name: name + ifabsent: string(electrodes_index) + identifier: true + range: string + required: true + equals_string: electrodes_index + Units__electrodes: + name: Units__electrodes + description: Electrode that each spike unit came from, specified using a DynamicTableRegion. + is_a: DynamicTableRegion + attributes: + name: + name: name + ifabsent: string(electrodes) + identifier: true + range: string + required: true + equals_string: electrodes + Units__waveforms_index: + name: Units__waveforms_index + description: Index into the waveforms dataset. One value for every spike event. + See 'waveforms' for more detail. + is_a: VectorIndex + attributes: + name: + name: name + ifabsent: string(waveforms_index) + identifier: true + range: string + required: true + equals_string: waveforms_index + Units__waveforms_index_index: + name: Units__waveforms_index_index + description: Index into the waveforms_index dataset. One value for every unit + (row in the table). See 'waveforms' for more detail. 
+ is_a: VectorIndex + attributes: + name: + name: name + ifabsent: string(waveforms_index_index) + identifier: true + range: string + required: true + equals_string: waveforms_index_index diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml new file mode 100644 index 0000000..de9e5e0 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ogen.yaml @@ -0,0 +1,74 @@ +name: core.nwb.ogen +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: core +id: core.nwb.ogen +version: 2.7.0 +imports: +- core.nwb.base +- core.nwb.device +- core.nwb.language +default_prefix: core.nwb.ogen/ +classes: + OptogeneticSeries: + name: OptogeneticSeries + description: An optogenetic stimulus. + is_a: TimeSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + data: + name: data + description: Applied power for optogenetic stimulus, in watts. Shape can be + 1D or 2D. 2D data is meant to be used in an extension of OptogeneticSeries + that defines what the second dimension represents. + multivalued: false + range: numeric + required: true + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_rois + tree_root: true + OptogeneticStimulusSite: + name: OptogeneticStimulusSite + description: A site of optogenetic stimulation. + is_a: NWBContainer + attributes: + name: + name: name + identifier: true + range: string + required: true + description: + name: description + description: Description of stimulation site. + multivalued: false + range: text + required: true + excitation_lambda: + name: excitation_lambda + description: Excitation wavelength, in nm. + multivalued: false + range: float32 + required: true + location: + name: location + description: Location of the stimulation site. 
Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use + standard atlas names for anatomical regions when possible. + multivalued: false + range: text + required: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml new file mode 100644 index 0000000..51da17c --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.ophys.yaml @@ -0,0 +1,421 @@ +name: core.nwb.ophys +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: core +id: core.nwb.ophys +version: 2.7.0 +imports: +- core.nwb.image +- core.nwb.base +- ../../hdmf_common/v1_8_0/namespace +- core.nwb.device +- core.nwb.language +default_prefix: core.nwb.ophys/ +classes: + OnePhotonSeries: + name: OnePhotonSeries + description: Image stack recorded over time from 1-photon microscope. + is_a: ImageSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + pmt_gain: + name: pmt_gain + description: Photomultiplier gain. + range: float32 + scan_line_rate: + name: scan_line_rate + description: Lines imaged per second. This is also stored in /general/optophysiology + but is kept here as it is useful information for analysis, and so good to + be stored w/ the actual data. + range: float32 + exposure_time: + name: exposure_time + description: Exposure time of the sample; often the inverse of the frequency. + range: float32 + binning: + name: binning + description: Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc. + range: uint8 + power: + name: power + description: Power of the excitation in mW, if known. + range: float32 + intensity: + name: intensity + description: Intensity of the excitation in mW/mm^2, if known. 
+ range: float32 + tree_root: true + TwoPhotonSeries: + name: TwoPhotonSeries + description: Image stack recorded over time from 2-photon microscope. + is_a: ImageSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + pmt_gain: + name: pmt_gain + description: Photomultiplier gain. + range: float32 + scan_line_rate: + name: scan_line_rate + description: Lines imaged per second. This is also stored in /general/optophysiology + but is kept here as it is useful information for analysis, and so good to + be stored w/ the actual data. + range: float32 + field_of_view: + name: field_of_view + description: Width, height and depth of image, or imaged area, in meters. + multivalued: false + range: float32 + required: false + any_of: + - array: + dimensions: + - alias: width|height + exact_cardinality: 2 + - array: + dimensions: + - alias: width|height|depth + exact_cardinality: 3 + tree_root: true + RoiResponseSeries: + name: RoiResponseSeries + description: ROI responses over an imaging plane. The first dimension represents + time. The second dimension, if present, represents ROIs. + is_a: TimeSeries + attributes: + name: + name: name + identifier: true + range: string + required: true + data: + name: data + description: Signals from ROIs. + multivalued: false + range: numeric + required: true + any_of: + - array: + dimensions: + - alias: num_times + - array: + dimensions: + - alias: num_times + - alias: num_ROIs + rois: + name: rois + description: DynamicTableRegion referencing into an ROITable containing information + on the ROIs stored in this timeseries. + multivalued: false + range: RoiResponseSeries__rois + required: true + tree_root: true + RoiResponseSeries__rois: + name: RoiResponseSeries__rois + description: DynamicTableRegion referencing into an ROITable containing information + on the ROIs stored in this timeseries. 
+ is_a: DynamicTableRegion + attributes: + name: + name: name + ifabsent: string(rois) + identifier: true + range: string + required: true + equals_string: rois + DfOverF: + name: DfOverF + description: dF/F information about a region of interest (ROI). Storage hierarchy + of dF/F should be the same as for segmentation (i.e., same names for ROIs and + for image planes). + is_a: NWBDataInterface + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: RoiResponseSeries + tree_root: true + Fluorescence: + name: Fluorescence + description: Fluorescence information about a region of interest (ROI). Storage + hierarchy of fluorescence should be the same as for segmentation (ie, same names + for ROIs and for image planes). + is_a: NWBDataInterface + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: RoiResponseSeries + tree_root: true + ImageSegmentation: + name: ImageSegmentation + description: Stores pixels in an image that represent different regions of interest + (ROIs) or masks. All segmentation for a given imaging plane is stored together, + with storage for multiple imaging planes (masks) supported. Each ROI is stored + in its own subgroup, with the ROI group containing both a 2D mask and a list + of pixels that make up this mask. Segments can also be used for masking neuropil. + If segmentation is allowed to change with time, a new imaging plane (or module) + is required and ROI names should remain consistent between them. + is_a: NWBDataInterface + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: PlaneSegmentation + tree_root: true + PlaneSegmentation: + name: PlaneSegmentation + description: Results from image segmentation of a specific imaging plane. 
+ is_a: DynamicTable + attributes: + name: + name: name + identifier: true + range: string + required: true + image_mask: + name: image_mask + description: ROI masks for each ROI. Each image mask is the size of the original + imaging plane (or volume) and members of the ROI are finite non-zero. + multivalued: false + range: AnyType + required: false + any_of: + - array: + dimensions: + - alias: num_roi + - alias: num_x + - alias: num_y + - array: + dimensions: + - alias: num_roi + - alias: num_x + - alias: num_y + - alias: num_z + pixel_mask_index: + name: pixel_mask_index + description: Index into pixel_mask. + multivalued: false + range: PlaneSegmentation__pixel_mask_index + required: false + pixel_mask: + name: pixel_mask + description: 'Pixel masks for each ROI: a list of indices and weights for + the ROI. Pixel masks are concatenated and parsing of this dataset is maintained + by the PlaneSegmentation' + multivalued: false + range: PlaneSegmentation__pixel_mask + required: false + voxel_mask_index: + name: voxel_mask_index + description: Index into voxel_mask. + multivalued: false + range: PlaneSegmentation__voxel_mask_index + required: false + voxel_mask: + name: voxel_mask + description: 'Voxel masks for each ROI: a list of indices and weights for + the ROI. Voxel masks are concatenated and parsing of this dataset is maintained + by the PlaneSegmentation' + multivalued: false + range: PlaneSegmentation__voxel_mask + required: false + reference_images: + name: reference_images + description: Image stacks that the segmentation masks apply to. + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: ImageSeries + tree_root: true + PlaneSegmentation__pixel_mask_index: + name: PlaneSegmentation__pixel_mask_index + description: Index into pixel_mask. 
+ is_a: VectorIndex + attributes: + name: + name: name + ifabsent: string(pixel_mask_index) + identifier: true + range: string + required: true + equals_string: pixel_mask_index + PlaneSegmentation__pixel_mask: + name: PlaneSegmentation__pixel_mask + description: 'Pixel masks for each ROI: a list of indices and weights for the + ROI. Pixel masks are concatenated and parsing of this dataset is maintained + by the PlaneSegmentation' + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(pixel_mask) + identifier: true + range: string + required: true + equals_string: pixel_mask + x: + name: x + description: Pixel x-coordinate. + multivalued: false + range: uint32 + required: false + y: + name: y + description: Pixel y-coordinate. + multivalued: false + range: uint32 + required: false + weight: + name: weight + description: Weight of the pixel. + multivalued: false + range: float32 + required: false + PlaneSegmentation__voxel_mask_index: + name: PlaneSegmentation__voxel_mask_index + description: Index into voxel_mask. + is_a: VectorIndex + attributes: + name: + name: name + ifabsent: string(voxel_mask_index) + identifier: true + range: string + required: true + equals_string: voxel_mask_index + PlaneSegmentation__voxel_mask: + name: PlaneSegmentation__voxel_mask + description: 'Voxel masks for each ROI: a list of indices and weights for the + ROI. Voxel masks are concatenated and parsing of this dataset is maintained + by the PlaneSegmentation' + is_a: VectorData + attributes: + name: + name: name + ifabsent: string(voxel_mask) + identifier: true + range: string + required: true + equals_string: voxel_mask + x: + name: x + description: Voxel x-coordinate. + multivalued: false + range: uint32 + required: false + y: + name: y + description: Voxel y-coordinate. + multivalued: false + range: uint32 + required: false + z: + name: z + description: Voxel z-coordinate. 
+ multivalued: false + range: uint32 + required: false + weight: + name: weight + description: Weight of the voxel. + multivalued: false + range: float32 + required: false + ImagingPlane: + name: ImagingPlane + description: An imaging plane and its metadata. + is_a: NWBContainer + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: OpticalChannel + tree_root: true + OpticalChannel: + name: OpticalChannel + description: An optical channel used to record from an imaging plane. + is_a: NWBContainer + attributes: + name: + name: name + identifier: true + range: string + required: true + description: + name: description + description: Description or other notes about the channel. + multivalued: false + range: text + required: true + emission_lambda: + name: emission_lambda + description: Emission wavelength for channel, in nm. + multivalued: false + range: float32 + required: true + tree_root: true + MotionCorrection: + name: MotionCorrection + description: 'An image stack where all frames are shifted (registered) to a common + coordinate system, to account for movement and drift between frames. Note: each + frame at each point in time is assumed to be 2-D (has only x & y dimensions).' + is_a: NWBDataInterface + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: CorrectedImageStack + tree_root: true + CorrectedImageStack: + name: CorrectedImageStack + description: Results from motion correction of an image stack. + is_a: NWBDataInterface + attributes: + name: + name: name + identifier: true + range: string + required: true + corrected: + name: corrected + description: Image stack with frames shifted to the common coordinates. 
+ multivalued: false + range: ImageSeries + required: true + xy_translation: + name: xy_translation + description: Stores the x,y delta necessary to align each frame to the common + coordinates, for example, to align each frame to a reference image. + multivalued: false + range: TimeSeries + required: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml new file mode 100644 index 0000000..402df85 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/core.nwb.retinotopy.yaml @@ -0,0 +1,326 @@ +name: core.nwb.retinotopy +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: core +id: core.nwb.retinotopy +version: 2.7.0 +imports: +- core.nwb.base +- core.nwb.language +default_prefix: core.nwb.retinotopy/ +classes: + ImagingRetinotopy: + name: ImagingRetinotopy + description: 'DEPRECATED. Intrinsic signal optical imaging or widefield imaging + for measuring retinotopy. Stores orthogonal maps (e.g., altitude/azimuth; radius/theta) + of responses to specific stimuli and a combined polarity map from which to identify + visual areas. This group does not store the raw responses imaged during retinotopic + mapping or the stimuli presented, but rather the resulting phase and power maps + after applying a Fourier transform on the averaged responses. Note: for data + consistency, all images and arrays are stored in the format [row][column] and + [row, col], which equates to [y][x]. Field of view and dimension arrays may + appear backward (i.e., y before x).' + is_a: NWBDataInterface + attributes: + name: + name: name + ifabsent: string(ImagingRetinotopy) + identifier: true + range: string + required: true + axis_1_phase_map: + name: axis_1_phase_map + description: Phase response to stimulus on the first measured axis. 
+ multivalued: false + range: ImagingRetinotopy__axis_1_phase_map + required: true + axis_1_power_map: + name: axis_1_power_map + description: Power response on the first measured axis. Response is scaled + so 0.0 is no power in the response and 1.0 is maximum relative power. + multivalued: false + range: ImagingRetinotopy__axis_1_power_map + required: false + axis_2_phase_map: + name: axis_2_phase_map + description: Phase response to stimulus on the second measured axis. + multivalued: false + range: ImagingRetinotopy__axis_2_phase_map + required: true + axis_2_power_map: + name: axis_2_power_map + description: Power response on the second measured axis. Response is scaled + so 0.0 is no power in the response and 1.0 is maximum relative power. + multivalued: false + range: ImagingRetinotopy__axis_2_power_map + required: false + axis_descriptions: + name: axis_descriptions + description: Two-element array describing the contents of the two response + axis fields. Description should be something like ['altitude', 'azimuth'] + or '['radius', 'theta']. + multivalued: false + array: + dimensions: + - alias: axis_1, axis_2 + exact_cardinality: 2 + range: text + required: true + focal_depth_image: + name: focal_depth_image + description: 'Gray-scale image taken with same settings/parameters (e.g., + focal depth, wavelength) as data collection. Array format: [rows][columns].' + multivalued: false + range: ImagingRetinotopy__focal_depth_image + required: false + sign_map: + name: sign_map + description: Sine of the angle between the direction of the gradient in axis_1 + and axis_2. + multivalued: false + range: ImagingRetinotopy__sign_map + required: false + vasculature_image: + name: vasculature_image + description: 'Gray-scale anatomical image of cortical surface. 
Array structure: + [rows][columns]' + multivalued: false + range: ImagingRetinotopy__vasculature_image + required: true + tree_root: true + ImagingRetinotopy__axis_1_phase_map: + name: ImagingRetinotopy__axis_1_phase_map + description: Phase response to stimulus on the first measured axis. + attributes: + name: + name: name + ifabsent: string(axis_1_phase_map) + identifier: true + range: string + required: true + equals_string: axis_1_phase_map + dimension: + name: dimension + description: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + range: int32 + field_of_view: + name: field_of_view + description: Size of viewing area, in meters. + range: float32 + unit: + name: unit + description: Unit that axis data is stored in (e.g., degrees). + range: text + array: + name: array + array: + dimensions: + - alias: num_rows + - alias: num_cols + range: float32 + ImagingRetinotopy__axis_1_power_map: + name: ImagingRetinotopy__axis_1_power_map + description: Power response on the first measured axis. Response is scaled so + 0.0 is no power in the response and 1.0 is maximum relative power. + attributes: + name: + name: name + ifabsent: string(axis_1_power_map) + identifier: true + range: string + required: true + equals_string: axis_1_power_map + dimension: + name: dimension + description: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + range: int32 + field_of_view: + name: field_of_view + description: Size of viewing area, in meters. + range: float32 + unit: + name: unit + description: Unit that axis data is stored in (e.g., degrees). + range: text + array: + name: array + array: + dimensions: + - alias: num_rows + - alias: num_cols + range: float32 + ImagingRetinotopy__axis_2_phase_map: + name: ImagingRetinotopy__axis_2_phase_map + description: Phase response to stimulus on the second measured axis. 
+ attributes: + name: + name: name + ifabsent: string(axis_2_phase_map) + identifier: true + range: string + required: true + equals_string: axis_2_phase_map + dimension: + name: dimension + description: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + range: int32 + field_of_view: + name: field_of_view + description: Size of viewing area, in meters. + range: float32 + unit: + name: unit + description: Unit that axis data is stored in (e.g., degrees). + range: text + array: + name: array + array: + dimensions: + - alias: num_rows + - alias: num_cols + range: float32 + ImagingRetinotopy__axis_2_power_map: + name: ImagingRetinotopy__axis_2_power_map + description: Power response on the second measured axis. Response is scaled so + 0.0 is no power in the response and 1.0 is maximum relative power. + attributes: + name: + name: name + ifabsent: string(axis_2_power_map) + identifier: true + range: string + required: true + equals_string: axis_2_power_map + dimension: + name: dimension + description: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + range: int32 + field_of_view: + name: field_of_view + description: Size of viewing area, in meters. + range: float32 + unit: + name: unit + description: Unit that axis data is stored in (e.g., degrees). + range: text + array: + name: array + array: + dimensions: + - alias: num_rows + - alias: num_cols + range: float32 + ImagingRetinotopy__focal_depth_image: + name: ImagingRetinotopy__focal_depth_image + description: 'Gray-scale image taken with same settings/parameters (e.g., focal + depth, wavelength) as data collection. Array format: [rows][columns].' 
+ attributes: + name: + name: name + ifabsent: string(focal_depth_image) + identifier: true + range: string + required: true + equals_string: focal_depth_image + bits_per_pixel: + name: bits_per_pixel + description: Number of bits used to represent each value. This is necessary + to determine maximum (white) pixel value. + range: int32 + dimension: + name: dimension + description: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + range: int32 + field_of_view: + name: field_of_view + description: Size of viewing area, in meters. + range: float32 + focal_depth: + name: focal_depth + description: Focal depth offset, in meters. + range: float32 + format: + name: format + description: Format of image. Right now only 'raw' is supported. + range: text + array: + name: array + array: + dimensions: + - alias: num_rows + - alias: num_cols + range: uint16 + ImagingRetinotopy__sign_map: + name: ImagingRetinotopy__sign_map + description: Sine of the angle between the direction of the gradient in axis_1 + and axis_2. + attributes: + name: + name: name + ifabsent: string(sign_map) + identifier: true + range: string + required: true + equals_string: sign_map + dimension: + name: dimension + description: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + range: int32 + field_of_view: + name: field_of_view + description: Size of viewing area, in meters. + range: float32 + array: + name: array + array: + dimensions: + - alias: num_rows + - alias: num_cols + range: float32 + ImagingRetinotopy__vasculature_image: + name: ImagingRetinotopy__vasculature_image + description: 'Gray-scale anatomical image of cortical surface. 
Array structure: + [rows][columns]' + attributes: + name: + name: name + ifabsent: string(vasculature_image) + identifier: true + range: string + required: true + equals_string: vasculature_image + bits_per_pixel: + name: bits_per_pixel + description: Number of bits used to represent each value. This is necessary + to determine maximum (white) pixel value + range: int32 + dimension: + name: dimension + description: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + range: int32 + field_of_view: + name: field_of_view + description: Size of viewing area, in meters. + range: float32 + format: + name: format + description: Format of image. Right now only 'raw' is supported. + range: text + array: + name: array + array: + dimensions: + - alias: num_rows + - alias: num_cols + range: uint16 diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/namespace.yaml new file mode 100644 index 0000000..bbd6f1f --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/core/v2_7_0/namespace.yaml @@ -0,0 +1,28 @@ +name: core +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: core +description: NWB namespace +id: core +version: 2.7.0 +imports: +- core.nwb.base +- core.nwb.device +- core.nwb.epoch +- core.nwb.image +- core.nwb.file +- core.nwb.misc +- core.nwb.behavior +- core.nwb.ecephys +- core.nwb.icephys +- core.nwb.ogen +- core.nwb.ophys +- core.nwb.retinotopy +- core.nwb.language +- ../../hdmf_common/v1_8_0/namespace +- ../../hdmf_experimental/v0_5_0/namespace +default_prefix: core/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml new file mode 100644 index 0000000..45192b2 --- /dev/null +++ 
b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.base.yaml @@ -0,0 +1,48 @@ +name: hdmf-common.base +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.base +version: 1.8.0 +imports: +- hdmf-common.nwb.language +default_prefix: hdmf-common.base/ +classes: + Data: + name: Data + description: An abstract data type for a dataset. + attributes: + name: + name: name + identifier: true + range: string + required: true + tree_root: true + Container: + name: Container + description: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. + attributes: + name: + name: name + identifier: true + range: string + required: true + tree_root: true + SimpleMultiContainer: + name: SimpleMultiContainer + description: A simple Container for holding onto multiple containers. + is_a: Container + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: Container + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml new file mode 100644 index 0000000..d85e6fc --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.nwb.language.yaml @@ -0,0 +1,146 @@ +name: hdmf-common.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: core +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + float64: + name: float64 + typeof: double + long: + name: long + typeof: 
integer + int64: + name: int64 + typeof: integer + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + int16: + name: int16 + typeof: integer + short: + name: short + typeof: integer + int8: + name: int8 + typeof: integer + uint: + name: uint + typeof: integer + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + minimum_value: 0 + numeric: + name: numeric + typeof: float + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime +enums: + FlatDType: + name: FlatDType + permissible_values: + float: + text: float + float32: + text: float32 + double: + text: double + float64: + text: float64 + long: + text: long + int64: + text: int64 + int: + text: int + int32: + text: int32 + int16: + text: int16 + short: + text: short + int8: + text: int8 + uint: + text: uint + uint32: + text: uint32 + uint16: + text: uint16 + uint8: + text: uint8 + uint64: + text: uint64 + numeric: + text: numeric + text: + text: text + utf: + text: utf + utf8: + text: utf8 + utf_8: + text: utf_8 + ascii: + text: ascii + bool: + text: bool + isodatetime: + text: isodatetime +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.sparse.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.sparse.yaml new file mode 100644 index 0000000..2663c5b --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.sparse.yaml @@ -0,0 +1,60 @@ 
+name: hdmf-common.sparse +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.sparse +version: 1.8.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.sparse/ +classes: + CSRMatrix: + name: CSRMatrix + description: A compressed sparse row matrix. Data are stored in the standard CSR + format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] + and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + is_a: Container + attributes: + name: + name: name + identifier: true + range: string + required: true + shape: + name: shape + description: The shape (number of rows, number of columns) of this sparse + matrix. + range: uint + indices: + name: indices + description: The column indices. + multivalued: false + array: + dimensions: + - alias: number of non-zero values + range: uint + required: true + indptr: + name: indptr + description: The row index pointer. + multivalued: false + array: + dimensions: + - alias: number of rows in the matrix + 1 + range: uint + required: true + data: + name: data + description: The non-zero values in the matrix. 
+ multivalued: false + array: + dimensions: + - alias: number of non-zero values + range: AnyType + required: true + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml new file mode 100644 index 0000000..c23cfd0 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/hdmf-common.table.yaml @@ -0,0 +1,190 @@ +name: hdmf-common.table +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-common +id: hdmf-common.table +version: 1.8.0 +imports: +- hdmf-common.base +- hdmf-common.nwb.language +default_prefix: hdmf-common.table/ +classes: + VectorData: + name: VectorData + description: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is along the rows + of the DynamicTable and each step along the first dimension is a cell of the + larger table. VectorData can also be used to represent a ragged array if paired + with a VectorIndex. This allows for storing arrays of varying length in a single + cell of the DynamicTable by indexing into this VectorData. The first vector + is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], + and so on. + is_a: Data + attributes: + name: + name: name + identifier: true + range: string + required: true + description: + name: description + description: Description of what these vectors represent. 
+ range: text + array: + name: array + range: AnyType + any_of: + - array: + dimensions: + - alias: dim0 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - array: + dimensions: + - alias: dim0 + - alias: dim1 + - alias: dim2 + - alias: dim3 + tree_root: true + VectorIndex: + name: VectorIndex + description: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map between + the rows of a DynamicTable and the indices of the VectorData. The name of the + VectorIndex is expected to be the name of the target VectorData object followed + by "_index". + is_a: VectorData + attributes: + name: + name: name + identifier: true + range: string + required: true + target: + name: target + description: Reference to the target dataset that this index applies to. + range: VectorData + tree_root: true + ElementIdentifiers: + name: ElementIdentifiers + description: A list of unique identifiers for values within a dataset, e.g. rows + of a DynamicTable. + is_a: Data + attributes: + name: + name: name + ifabsent: string(element_id) + identifier: true + range: string + required: true + tree_root: true + DynamicTableRegion: + name: DynamicTableRegion + description: DynamicTableRegion provides a link from one table to an index or + region of another. The `table` attribute is a link to another `DynamicTable`, + indicating which table is referenced, and the data is int(s) indicating the + row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to + associate rows with repeated meta-data without data duplication. They can also + be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create + ragged references, so a single cell of a `DynamicTable` can reference many rows + of another `DynamicTable`. 
+ is_a: VectorData + attributes: + name: + name: name + identifier: true + range: string + required: true + table: + name: table + description: Reference to the DynamicTable object that this region applies + to. + range: DynamicTable + description: + name: description + description: Description of what this table region points to. + range: text + tree_root: true + DynamicTable: + name: DynamicTable + description: A group containing multiple datasets that are aligned on the first + dimension (Currently, this requirement if left up to APIs to check and enforce). + These datasets represent different columns in the table. Apart from a column + that contains unique identifiers for each row, there are no other required datasets. + Users are free to add any number of custom VectorData objects (columns) here. + DynamicTable also supports ragged array columns, where each element can be of + a different size. To add a ragged array column, use a VectorIndex type to index + the corresponding VectorData type. See documentation for VectorData and VectorIndex + for more details. Unlike a compound data type, which is analogous to storing + an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. + This provides an alternative structure to choose from when optimizing storage + for anticipated access patterns. Additionally, this type provides a way of creating + a table without having to define a compound type up front. Although this convenience + may be attractive, users should think carefully about how data will be accessed. + DynamicTable is more appropriate for column-centric access, whereas a dataset + with a compound type would be more appropriate for row-centric access. Finally, + data size should also be taken into account. For small tables, performance loss + may be an acceptable trade-off for the flexibility of a DynamicTable. 
+ is_a: Container + attributes: + name: + name: name + identifier: true + range: string + required: true + colnames: + name: colnames + description: The names of the columns in this table. This should be used to + specify an order to the columns. + range: text + description: + name: description + description: Description of what is in this dynamic table. + range: text + id: + name: id + description: Array of unique identifiers for the rows of this dynamic table. + multivalued: false + array: + dimensions: + - alias: num_rows + range: int + required: true + vector_data: + name: vector_data + description: Vector columns, including index columns, of this dynamic table. + multivalued: true + range: VectorData + required: false + tree_root: true + AlignedDynamicTable: + name: AlignedDynamicTable + description: DynamicTable container that supports storing a collection of sub-tables. + Each sub-table is a DynamicTable itself that is aligned with the main table + by row index. I.e., all DynamicTables stored in this group MUST have the same + number of rows. This type effectively defines a 2-level table in which the main + data is stored in the main table implemented by this type and additional columns + of the table are grouped into categories, with each category being represented + by a separate DynamicTable stored within the group. 
+ is_a: DynamicTable + attributes: + children: + name: children + multivalued: true + inlined: true + inlined_as_list: false + any_of: + - range: DynamicTable + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/namespace.yaml new file mode 100644 index 0000000..27276b8 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_common/v1_8_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-common +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-common +description: Common data structures provided by HDMF +id: hdmf-common +version: 1.8.0 +imports: +- hdmf-common.base +- hdmf-common.table +- hdmf-common.sparse +- hdmf-common.nwb.language +default_prefix: hdmf-common/ diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.experimental.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.experimental.yaml new file mode 100644 index 0000000..a5beacd --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.experimental.yaml @@ -0,0 +1,32 @@ +name: hdmf-experimental.experimental +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-experimental +id: hdmf-experimental.experimental +version: 0.5.0 +imports: +- ../../hdmf_common/v1_8_0/namespace +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental.experimental/ +classes: + EnumData: + name: EnumData + description: Data that come from a fixed set of values. A data value of i corresponds + to the i-th value in the VectorData referenced by the 'elements' attribute. 
+ is_a: VectorData + attributes: + name: + name: name + identifier: true + range: string + required: true + elements: + name: elements + description: Reference to the VectorData object that contains the enumerable + elements + range: VectorData + tree_root: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml new file mode 100644 index 0000000..08b002e --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.nwb.language.yaml @@ -0,0 +1,146 @@ +name: hdmf-experimental.nwb.language +annotations: + is_namespace: + tag: is_namespace + value: 'False' + namespace: + tag: namespace + value: core +description: Adapter objects to mimic the behavior of elements in the nwb-schema-language +id: nwb.language +imports: +- linkml:types +prefixes: + linkml: + prefix_prefix: linkml + prefix_reference: https://w3id.org/linkml +default_prefix: nwb.language/ +types: + float32: + name: float32 + typeof: float + float64: + name: float64 + typeof: double + long: + name: long + typeof: integer + int64: + name: int64 + typeof: integer + int: + name: int + typeof: integer + int32: + name: int32 + typeof: integer + int16: + name: int16 + typeof: integer + short: + name: short + typeof: integer + int8: + name: int8 + typeof: integer + uint: + name: uint + typeof: integer + minimum_value: 0 + uint32: + name: uint32 + typeof: integer + minimum_value: 0 + uint16: + name: uint16 + typeof: integer + minimum_value: 0 + uint8: + name: uint8 + typeof: integer + minimum_value: 0 + uint64: + name: uint64 + typeof: integer + minimum_value: 0 + numeric: + name: numeric + typeof: float + text: + name: text + typeof: string + utf: + name: utf + typeof: string + utf8: + name: utf8 + typeof: string + utf_8: + name: utf_8 + typeof: string + ascii: + name: ascii + typeof: string + bool: + name: 
bool + typeof: boolean + isodatetime: + name: isodatetime + typeof: datetime +enums: + FlatDType: + name: FlatDType + permissible_values: + float: + text: float + float32: + text: float32 + double: + text: double + float64: + text: float64 + long: + text: long + int64: + text: int64 + int: + text: int + int32: + text: int32 + int16: + text: int16 + short: + text: short + int8: + text: int8 + uint: + text: uint + uint32: + text: uint32 + uint16: + text: uint16 + uint8: + text: uint8 + uint64: + text: uint64 + numeric: + text: numeric + text: + text: text + utf: + text: utf + utf8: + text: utf8 + utf_8: + text: utf_8 + ascii: + text: ascii + bool: + text: bool + isodatetime: + text: isodatetime +classes: + AnyType: + name: AnyType + description: Needed because some classes in hdmf-common are datasets without dtype + class_uri: linkml:Any diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml new file mode 100644 index 0000000..242b36b --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/hdmf-experimental.resources.yaml @@ -0,0 +1,228 @@ +name: hdmf-experimental.resources +annotations: + is_namespace: + tag: is_namespace + value: false + namespace: + tag: namespace + value: hdmf-experimental +id: hdmf-experimental.resources +version: 0.5.0 +imports: +- ../../hdmf_common/v1_8_0/namespace +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental.resources/ +classes: + HERD: + name: HERD + description: HDMF External Resources Data Structure. A set of six tables for tracking + external resource references in a file or across multiple files. + is_a: Container + attributes: + name: + name: name + identifier: true + range: string + required: true + keys: + name: keys + description: A table for storing user terms that are used to refer to external + resources. 
+ multivalued: false + range: HERD__keys + required: true + files: + name: files + description: A table for storing object ids of files used in external resources. + multivalued: false + range: HERD__files + required: true + entities: + name: entities + description: A table for mapping user terms (i.e., keys) to resource entities. + multivalued: false + range: HERD__entities + required: true + objects: + name: objects + description: A table for identifying which objects in a file contain references + to external resources. + multivalued: false + range: HERD__objects + required: true + object_keys: + name: object_keys + description: A table for identifying which objects use which keys. + multivalued: false + range: HERD__object_keys + required: true + entity_keys: + name: entity_keys + description: A table for identifying which keys use which entity. + multivalued: false + range: HERD__entity_keys + required: true + tree_root: true + HERD__keys: + name: HERD__keys + description: A table for storing user terms that are used to refer to external + resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(keys) + identifier: true + range: string + required: true + equals_string: keys + key: + name: key + description: The user term that maps to one or more resources in the `resources` + table, e.g., "human". + multivalued: false + range: text + required: true + HERD__files: + name: HERD__files + description: A table for storing object ids of files used in external resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(files) + identifier: true + range: string + required: true + equals_string: files + file_object_id: + name: file_object_id + description: The object id (UUID) of a file that contains objects that refers + to external resources. + multivalued: false + range: text + required: true + HERD__entities: + name: HERD__entities + description: A table for mapping user terms (i.e., keys) to resource entities. 
+ is_a: Data + attributes: + name: + name: name + ifabsent: string(entities) + identifier: true + range: string + required: true + equals_string: entities + entity_id: + name: entity_id + description: The compact uniform resource identifier (CURIE) of the entity, + in the form [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'. + multivalued: false + range: text + required: true + entity_uri: + name: entity_uri + description: The URI for the entity this reference applies to. This can be + an empty string. e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606 + multivalued: false + range: text + required: true + HERD__objects: + name: HERD__objects + description: A table for identifying which objects in a file contain references + to external resources. + is_a: Data + attributes: + name: + name: name + ifabsent: string(objects) + identifier: true + range: string + required: true + equals_string: objects + files_idx: + name: files_idx + description: The row index to the file in the `files` table containing the + object. + multivalued: false + range: uint + required: true + object_id: + name: object_id + description: The object id (UUID) of the object. + multivalued: false + range: text + required: true + object_type: + name: object_type + description: The data type of the object. + multivalued: false + range: text + required: true + relative_path: + name: relative_path + description: The relative path from the data object with the `object_id` to + the dataset or attribute with the value(s) that is associated with an external + resource. This can be an empty string if the object is a dataset that contains + the value(s) that is associated with an external resource. + multivalued: false + range: text + required: true + field: + name: field + description: The field within the compound data type using an external resource. 
+ This is used only if the dataset or attribute is a compound data type; otherwise + this should be an empty string. + multivalued: false + range: text + required: true + HERD__object_keys: + name: HERD__object_keys + description: A table for identifying which objects use which keys. + is_a: Data + attributes: + name: + name: name + ifabsent: string(object_keys) + identifier: true + range: string + required: true + equals_string: object_keys + objects_idx: + name: objects_idx + description: The row index to the object in the `objects` table that holds + the key + multivalued: false + range: uint + required: true + keys_idx: + name: keys_idx + description: The row index to the key in the `keys` table. + multivalued: false + range: uint + required: true + HERD__entity_keys: + name: HERD__entity_keys + description: A table for identifying which keys use which entity. + is_a: Data + attributes: + name: + name: name + ifabsent: string(entity_keys) + identifier: true + range: string + required: true + equals_string: entity_keys + entities_idx: + name: entities_idx + description: The row index to the entity in the `entities` table. + multivalued: false + range: uint + required: true + keys_idx: + name: keys_idx + description: The row index to the key in the `keys` table. + multivalued: false + range: uint + required: true diff --git a/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/namespace.yaml b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/namespace.yaml new file mode 100644 index 0000000..1957023 --- /dev/null +++ b/nwb_linkml/src/nwb_linkml/schema/linkml/hdmf_experimental/v0_5_0/namespace.yaml @@ -0,0 +1,17 @@ +name: hdmf-experimental +annotations: + is_namespace: + tag: is_namespace + value: true + namespace: + tag: namespace + value: hdmf-experimental +description: Experimental data structures provided by HDMF. These are not guaranteed + to be available in the future. 
+id: hdmf-experimental +version: 0.5.0 +imports: +- hdmf-experimental.experimental +- hdmf-experimental.resources +- hdmf-experimental.nwb.language +default_prefix: hdmf-experimental/